From 8a509ea2f8870af4b2cc2eab89b9142a6356ed75 Mon Sep 17 00:00:00 2001 From: "Pillai, Venkatesh" Date: Mon, 20 Nov 2023 12:44:29 +0530 Subject: [PATCH] camera_mediation_on_aow camera mediation is enabled for AoW on Android T code Signed-off-by: Pillai, Venkatesh --- Android.mk | 1 + include/CameraSocketCommand.h | 3 +- include/CameraSocketServerThread.h | 11 ++ src/CameraSocketCommand.cpp | 2 + src/CameraSocketServerThread.cpp | 197 +++++++++++++++++++++++++-- src/VirtualCameraFactory.cpp | 27 +++- src/VirtualFakeCamera3.cpp | 15 +++ src/fake-pipeline2/Sensor.cpp | 209 ++++++++++++++++++++++++++++- 8 files changed, 447 insertions(+), 18 deletions(-) diff --git a/Android.mk b/Android.mk index 1d8e8c5..29143d8 100644 --- a/Android.mk +++ b/Android.mk @@ -175,6 +175,7 @@ ifeq ($(TARGET_BOARD_PLATFORM), celadon) camera_vhal_cflags += -DGRALLOC_MAPPER4 else camera_vhal_cflags += -DENABLE_FFMPEG +camera_vhal_cflags += -DUSE_PIPE endif LOCAL_MODULE_RELATIVE_PATH := ${camera_vhal_module_relative_path} diff --git a/include/CameraSocketCommand.h b/include/CameraSocketCommand.h index a9a1522..cd7e5b8 100644 --- a/include/CameraSocketCommand.h +++ b/include/CameraSocketCommand.h @@ -33,7 +33,7 @@ namespace android { namespace socket { enum class VideoCodecType { kH264 = 1, kH265 = 2,kI420 = 4, kMJPEG = 8, kAll = 15 }; -enum class FrameResolution { k480p = 1, k720p = 2, k1080p = 4, kAll = 7 }; +enum class FrameResolution { k480p = 1, k720p = 2, k1080p = 4, kWXGA = 8, kAll = 15 }; enum class SensorOrientation { ORIENTATION_0 = 0, @@ -63,6 +63,7 @@ typedef struct _camera_config { uint32_t cameraId; uint32_t codec_type; uint32_t resolution; + char pkg_name[128]; uint32_t reserved[5]; } camera_config_t; diff --git a/include/CameraSocketServerThread.h b/include/CameraSocketServerThread.h index 4214ffb..8c70fdd 100644 --- a/include/CameraSocketServerThread.h +++ b/include/CameraSocketServerThread.h @@ -35,6 +35,7 @@ #endif #include "CameraSocketCommand.h" #include +#include 
"VirtualBuffer.h" namespace android { @@ -43,6 +44,7 @@ enum tranSock UNIX = 0, TCP = 1, VSOCK = 2, + PIPE = 3, }; class VirtualCameraFactory; @@ -64,11 +66,20 @@ class CameraSocketServerThread : public Thread { ssize_t size_update = 0; static void* threadFunc(void * arg); + pthread_cond_t mSignalHotplug = PTHREAD_COND_INITIALIZER; + pthread_mutex_t mHotplugLock = PTHREAD_MUTEX_INITIALIZER; + + + int UpdateCameraInfo(); + bool configureCapabilities(bool skipCapRead); private: virtual status_t readyToRun(); virtual bool threadLoop() override; + bool ProcessCameraDataFromPipe(ClientVideoBuffer *handle); + ssize_t recvData(int handle, char *pkt, int size, int flag); + ssize_t sendData(int handle, char *pkt, int size, int flag); void setCameraResolution(uint32_t resolution); void setCameraMaxSupportedResolution(int32_t width, int32_t height); diff --git a/src/CameraSocketCommand.cpp b/src/CameraSocketCommand.cpp index 9c8eceb..c25b8db 100644 --- a/src/CameraSocketCommand.cpp +++ b/src/CameraSocketCommand.cpp @@ -54,6 +54,8 @@ const char* codec_type_to_str(uint32_t type) { return "H264"; case int(android::socket::VideoCodecType::kH265): return "H265"; + case int(android::socket::VideoCodecType::kI420): + return "I420"; default: return "invalid"; } diff --git a/src/CameraSocketServerThread.cpp b/src/CameraSocketServerThread.cpp index 342cf9b..c3ecad9 100644 --- a/src/CameraSocketServerThread.cpp +++ b/src/CameraSocketServerThread.cpp @@ -69,6 +69,7 @@ bool gCameraFacingBack; bool gStartMetadataUpdate; bool gDoneMetadataUpdate; +int gDataPipeHandle = -1; using namespace socket; #ifdef ENABLE_FFMPEG @@ -91,7 +92,7 @@ CameraSocketServerThread::CameraSocketServerThread(std::string suffix, int err = pthread_create(&threadId, NULL, (THREADFUNCPTR) &CameraSocketServerThread::threadFunc, this); if(err) ALOGE("thread create failed"); - + ALOGI("%s camera socket server path is %s", __FUNCTION__, mSocketPath.c_str()); mNumOfCamerasRequested = 0; } @@ -158,6 +159,10 @@ void 
CameraSocketServerThread::setCameraResolution(uint32_t resolution) { gCameraMaxWidth = 1920; gCameraMaxHeight = 1080; break; + case uint32_t(FrameResolution::kWXGA): + gCameraMaxWidth = 640; + gCameraMaxHeight = 360; + break; default: break; } @@ -167,6 +172,22 @@ void CameraSocketServerThread::setCameraResolution(uint32_t resolution) { setCameraMaxSupportedResolution(gCameraMaxWidth, gCameraMaxHeight); } +ssize_t CameraSocketServerThread::recvData(int handle, char *pkt, int size, int flag) { +#ifndef USE_PIPE + return recv(handle, pkt, size, flag); +#else + return read(handle, pkt, size); +#endif +} + +ssize_t CameraSocketServerThread::sendData(int handle, char *pkt, int size, int flag) { +#ifndef USE_PIPE + return send(handle, pkt, size, flag); +#else + return write(handle, pkt, size); +#endif +} + bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { ALOGVV(LOG_TAG " %s Enter", __FUNCTION__); @@ -186,7 +207,7 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { camera_packet_t *ack_packet = NULL; camera_header_t header = {}; if(!skipCapRead) { - if ((recv_size = recv(mClientFd, (char *)&header, sizeof(camera_header_t), MSG_WAITALL)) < 0) { + if ((recv_size = recvData(mClientFd, (char *)&header, sizeof(camera_header_t), MSG_WAITALL)) < 0) { ALOGE(LOG_TAG "%s: Failed to receive header, err: %s ", __FUNCTION__, strerror(errno)); goto out; } @@ -211,14 +232,14 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { capability.maxNumberOfCameras = MAX_NUMBER_OF_SUPPORTED_CAMERAS; memcpy(cap_packet->payload, &capability, sizeof(camera_capability_t)); - if (send(mClientFd, cap_packet, cap_packet_size, 0) < 0) { + if (sendData(mClientFd,(char *)cap_packet, cap_packet_size, 0) < 0) { ALOGE(LOG_TAG "%s: Failed to send camera capabilities, err: %s ", __FUNCTION__, strerror(errno)); goto out; } ALOGI(LOG_TAG "%s: Sent CAPABILITY packet to client", __FUNCTION__); - if ((recv_size = recv(mClientFd, (char *)&header, 
sizeof(camera_header_t), MSG_WAITALL)) < 0) { + if ((recv_size = recvData(mClientFd, (char *)&header, sizeof(camera_header_t), MSG_WAITALL)) < 0) { ALOGE(LOG_TAG "%s: Failed to receive header, err: %s ", __FUNCTION__, strerror(errno)); goto out; } @@ -250,7 +271,7 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { // Update the number of cameras globally to create camera pipeline. gMaxNumOfCamerasSupported = mNumOfCamerasRequested; } - if ((recv_size = recv(mClientFd, (char *)&camera_info, + if ((recv_size = recvData(mClientFd, (char *)&camera_info, mNumOfCamerasRequested * sizeof(camera_info_t), MSG_WAITALL)) < 0) { ALOGE(LOG_TAG "%s: Failed to receive camera info, err: %s ", __FUNCTION__, strerror(errno)); goto out; @@ -295,6 +316,7 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { } switch (camera_info[i].resolution) { + case uint32_t(FrameResolution::kWXGA): case uint32_t(FrameResolution::k480p): case uint32_t(FrameResolution::k720p): case uint32_t(FrameResolution::k1080p): @@ -349,6 +371,7 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { // Going to update metadata for each camera, so update the status. gStartMetadataUpdate = false; gDoneMetadataUpdate = false; + camera_info[i].sensorOrientation = 0; camera_id = i; ALOGI(LOG_TAG "%s - Client requested for codec_type: %s, resolution: %s, orientation: %u, and " @@ -413,6 +436,18 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { "hence selected default", __FUNCTION__); } + +#if 0 + // Start updating metadata for one camera, so update the status. + gStartMetadataUpdate = true; + + // Wait till complete the metadata update for a camera. + while (!gDoneMetadataUpdate) { + ALOGVV("%s: wait till complete the metadata update for a camera", __FUNCTION__); + // 200us sleep for this thread. 
+ std::this_thread::sleep_for(std::chrono::microseconds(200)); + } +#endif gVirtualCameraFactory.createVirtualRemoteCamera(gVirtualCameraFactory.mSocketServer, camera_id); } @@ -430,7 +465,7 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { ack_packet->header.size = sizeof(camera_ack_t); memcpy(ack_packet->payload, &ack_payload, sizeof(camera_ack_t)); - if (send(mClientFd, ack_packet, ack_packet_size, 0) < 0) { + if (sendData(mClientFd, (char *)ack_packet, ack_packet_size, 0) < 0) { ALOGE(LOG_TAG "%s: Failed to send camera capabilities, err: %s ", __FUNCTION__, strerror(errno)); goto out; @@ -446,6 +481,63 @@ bool CameraSocketServerThread::configureCapabilities(bool skipCapRead) { return status; } + +bool CameraSocketServerThread::ProcessCameraDataFromPipe(ClientVideoBuffer *handle) { + int size_header =0; + ssize_t size_pending =0; + camera_header_t buffer_header = {}; +ALOGE("ProcessCameraDataFromPipe start\n"); + int retryCount = 0; + uint8_t *fbuffer = (uint8_t *)handle->clientBuf[handle->clientRevCount % 1].buffer; + size_header = read(gDataPipeHandle, (char *)&buffer_header, sizeof(camera_header_t)); + if(buffer_header.type == CAMERA_DATA){ + + size_pending = buffer_header.size; + while(size_pending != 0){ + ssize_t size_data = 0; + size_data = read(gDataPipeHandle, (char *)fbuffer+size_update, size_pending); + + if(size_data < 0){ + if(retryCount > 3) { + ALOGE("Dropping frame \n"); + break; + } + retryCount++; + ALOGE(LOG_TAG "entered into recv error, break to recover"); + continue; + } + size_update += size_data; + size_pending -= size_data; + if (size_pending == 0){ + handle->clientRevCount++; +#if 0 + FILE *fp_dump = fopen("/data/dump.yuv","w"); + if(fp_dump != NULL){ + fwrite(fbuffer,size_update,1,fp_dump); + ALOGE(LOG_TAG "dump camera frame"); + fclose(fp_dump); + } +#endif + size_update = 0; + ALOGE(LOG_TAG "[I420] %s: Packet rev %d and " + "size %zd", + __FUNCTION__, handle->clientRevCount, size_data); + break; + } + } + } 
else if(buffer_header.type == REQUEST_CAPABILITY){ + ALOGE("Calling request Capability \n"); + if(!configureCapabilities(true)) { + return false; + } + } else { + ALOGE("invalid packet received"); + return false; + } + return true; +} + + bool CameraSocketServerThread::threadLoop() { return true; } @@ -462,6 +554,7 @@ void* CameraSocketServerThread::threadFunc(void *arg) { struct sockaddr_vm addr_vm ; struct sockaddr_in addr_ip; int trans_mode = 0; + int pipe_handle = -1; char mode[PROPERTY_VALUE_MAX]; if ((property_get("ro.vendor.camera.transference", mode, nullptr) > 0) ){ @@ -477,9 +570,16 @@ void* CameraSocketServerThread::threadFunc(void *arg) { //Fall back to unix socket by default //trans_mode = UNIX; //D to do +#ifndef USE_PIPE trans_mode = VSOCK; +#else + trans_mode = PIPE; +#endif ALOGV("%s: falling back to UNIX as the trans mode is not set",__FUNCTION__); } + + trans_mode = PIPE; + if(trans_mode == UNIX) { threadParam->mSocketServerFd = ::socket(AF_UNIX, SOCK_STREAM, 0); @@ -575,7 +675,7 @@ void* CameraSocketServerThread::threadFunc(void *arg) { ALOGV("%s Failed to listen on ", __FUNCTION__); return NULL; } - }else{ +}else if(trans_mode == VSOCK){ memset(&addr_ip, 0, sizeof(addr_ip)); addr_vm.svm_family = AF_VSOCK; addr_vm.svm_port = 1982; @@ -591,7 +691,7 @@ void* CameraSocketServerThread::threadFunc(void *arg) { } ret = ::bind(threadParam->mSocketServerFd, (struct sockaddr *)&addr_vm, sizeof(struct sockaddr_vm)); - if (ret < 0) { + if (ret < 0) { ALOGV(LOG_TAG " %s Failed to bind port(%d). 
ret: %d, %s", __func__, addr_vm.svm_port, ret, strerror(errno)); return NULL; @@ -603,6 +703,85 @@ void* CameraSocketServerThread::threadFunc(void *arg) { } } + + } else if(trans_mode == PIPE) { +ALOGE("Shiva trans mode pipe \n"); + while (mRunning) { + if(trans_mode == PIPE) { + while (1) { + pipe_handle = open("/dev/virtpipe-common", O_RDWR); + if (pipe_handle < 0) { + ALOGD("%s open /dev/virtpipe-common fail errno=%d, error=%s\n", __FUNCTION__, errno, strerror(errno)); + sleep(1); + continue; + } else { + break; + } + } + while(1) { + ALOGD("%s: after pipe open writing camera_ctrl to create pipe handle \n", __FUNCTION__); + if (write(pipe_handle, "camera_ctrl", strlen("camera_ctrl")) < 0) { + ALOGE("%s: open pipe fail...\n", __FUNCTION__); + sleep(1); + continue; + } else { + break; + } + } + + while (1) { + gDataPipeHandle = open("/dev/virtpipe-common", O_RDWR); + if (gDataPipeHandle < 0) { + ALOGD("%s open /dev/virtpipe-common fail errno=%d, error=%s\n", __FUNCTION__, errno, strerror(errno)); + sleep(1); + continue; + } else { + break; + } + } + while(1) { + ALOGD("%s: after pipe open writing camera_data to create pipe handle \n", __FUNCTION__); + if (write(gDataPipeHandle, "camera_data", strlen("camera_data")) < 0) { + ALOGE("%s: open camera data pipe fail...\n", __FUNCTION__); + sleep(1); + continue; + } else { + break; + } + } + ALOGE("pipe connected success \n"); + } + + mClientFd = pipe_handle; + + bool status = false; + status = configureCapabilities(false); + if (status) { + ALOGI(LOG_TAG + "%s: Capability negotiation and metadata update" + "for %d camera(s) completed successfully..", + __FUNCTION__, mNumOfCamerasRequested); + } + + ClientVideoBuffer *handle = ClientVideoBuffer::getClientInstance(); + uint8_t *fbuffer = (uint8_t *)handle->clientBuf[handle->clientRevCount % 1].buffer; + // Reset and clear the input buffer before receiving the frames. 
+ handle->reset(); + + int retryLoop = 0; + while (true) { + if(!ProcessCameraDataFromPipe(handle)) { + retryLoop++; + if(retryLoop > 5) { + break; + } + sleep(1); + continue; + } + retryLoop = 0; + } + } +} while (threadParam->mRunning) { ALOGI(LOG_TAG " %s: Wait for camera client to connect. . .", __FUNCTION__); @@ -614,7 +793,7 @@ void* CameraSocketServerThread::threadFunc(void *arg) { socklen_t alen = sizeof(struct sockaddr_vm); new_client_fd = ::accept(threadParam->mSocketServerFd, (struct sockaddr *)&addr_vm, &alen); } - else + else if(trans_mode == UNIX) { socklen_t alen = sizeof(struct sockaddr_un); new_client_fd = ::accept(threadParam->mSocketServerFd, (struct sockaddr *)&addr_un, &alen); diff --git a/src/VirtualCameraFactory.cpp b/src/VirtualCameraFactory.cpp index e51c8ba..64bacc0 100644 --- a/src/VirtualCameraFactory.cpp +++ b/src/VirtualCameraFactory.cpp @@ -31,6 +31,8 @@ #include #include #include "VirtualBuffer.h" +#include + extern camera_module_t HAL_MODULE_INFO_SYM; /* @@ -39,6 +41,12 @@ extern camera_module_t HAL_MODULE_INFO_SYM; */ android::VirtualCameraFactory gVirtualCameraFactory; +struct buffer +{ + void *start; + size_t length; +}; + namespace android { bool gIsInFrameI420; @@ -104,9 +112,21 @@ bool VirtualCameraFactory::constructVirtualCamera() { // Allocate space for each cameras requested. 
if(mVirtualCameras != NULL) { + +for(int i = 0; i < mNumOfCamerasSupported; i++) { + if(mCallbacks != nullptr) { + mCallbacks->camera_device_status_change(mCallbacks, mVirtualCameras[i]->mCameraID, CAMERA_DEVICE_STATUS_NOT_PRESENT); + } else { + ALOGE("%s : Fail to update camera status to camera server\n", __FUNCTION__); + } + } + delete mVirtualCameras; mVirtualCameras = NULL; } + + mNumOfCamerasSupported = gMaxNumOfCamerasSupported; + mVirtualCameras = new VirtualBaseCamera *[mNumOfCamerasSupported]; if (mVirtualCameras == nullptr) { ALOGE("%s: Unable to allocate virtual camera array", __FUNCTION__); @@ -128,7 +148,7 @@ bool VirtualCameraFactory::createSocketServer() { mSocketServer = std::make_shared(id, std::ref(mCameraSessionState)); - + mSocketServer->run("FrontBackCameraSocketServerThread"); // TODO need to return false if error. return true; } @@ -299,6 +319,11 @@ void VirtualCameraFactory::createVirtualRemoteCamera( delete mVirtualCameras[cameraId]; } } + if(mCallbacks != nullptr) { + mCallbacks->camera_device_status_change(mCallbacks, cameraId, CAMERA_DEVICE_STATUS_PRESENT); + } else { + ALOGE("%s : Fail to update camera status to camera server\n", __FUNCTION__); + } } /******************************************************************************** diff --git a/src/VirtualFakeCamera3.cpp b/src/VirtualFakeCamera3.cpp index 6a71197..f2927b7 100644 --- a/src/VirtualFakeCamera3.cpp +++ b/src/VirtualFakeCamera3.cpp @@ -209,7 +209,10 @@ status_t VirtualFakeCamera3::sendCommandToClient(camera_cmd_t cmd) { camera_config_cmd_t config_cmd = {}; config_cmd.version = CAMERA_VHAL_VERSION_2; config_cmd.cmd = cmd; + char prop_val[PROPERTY_VALUE_MAX] = {'\0'}; + property_get("vendor.camera.app.name", prop_val, "false"); config_cmd.config.cameraId = mCameraID; + strncpy(config_cmd.config.pkg_name, prop_val, PROPERTY_VALUE_MAX); config_cmd.config.codec_type = mCodecType; config_cmd.config.resolution = mDecoderResolution; @@ -232,11 +235,22 @@ status_t 
VirtualFakeCamera3::sendCommandToClient(camera_cmd_t cmd) { memcpy(config_cmd_packet->payload, &config_cmd, sizeof(camera_config_cmd_t)); ALOGI("%s: Camera client fd %d!", __FUNCTION__, client_fd); +ALOGI("%s: Camera client fd %d! camera id %d", __FUNCTION__, client_fd, config_cmd.config.cameraId); +#ifdef USE_PIPE +ALOGE("Shiva use pipe \n"); + if (write(client_fd, config_cmd_packet, config_cmd_packet_size) < 0) { + ALOGE(LOG_TAG "%s: Failed to send Camera %s command to client, err %s ", __FUNCTION__, + (cmd == camera_cmd_t::CMD_CLOSE) ? "CloseCamera" : "OpenCamera", strerror(errno)); + goto out; + } + +#else if (send(client_fd, config_cmd_packet, config_cmd_packet_size, 0) < 0) { ALOGE(LOG_TAG "%s: Failed to send Camera %s command to client, err %s ", __FUNCTION__, (cmd == camera_cmd_t::CMD_CLOSE) ? "CloseCamera" : "OpenCamera", strerror(errno)); goto out; } +#endif ALOGI("%s: Sent cmd %s to client %d!", __FUNCTION__, (cmd == camera_cmd_t::CMD_CLOSE) ? "CloseCamera" : "OpenCamera", client_fd); @@ -1459,6 +1473,7 @@ status_t VirtualFakeCamera3::constructStaticInfo() { ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArray, 4); int32_t orientation = gCameraSensorOrientation; + orientation = 0; ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1); static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; diff --git a/src/fake-pipeline2/Sensor.cpp b/src/fake-pipeline2/Sensor.cpp index 48e1e21..0203d25 100644 --- a/src/fake-pipeline2/Sensor.cpp +++ b/src/fake-pipeline2/Sensor.cpp @@ -253,8 +253,84 @@ status_t Sensor::readyToRun() { return OK; } + +int getRotation(int *frontRot, int *backRot){ + + char *jsonString; + char *parseString; + FILE *fptr; + *frontRot = -1; + *backRot = -1; + char prop_val[PROPERTY_VALUE_MAX] = {'\0'}; + property_get("vendor.camera.app.name", prop_val, "false"); + char filePath[512] = "/data/share/config/"; + strcat(filePath, prop_val); + strcat(filePath, ".conf"); + ALOGV(" %s\n", filePath); 
+ fptr = fopen(filePath,"rb"); + if(fptr) { + ALOGV(" file open success \n"); + fseek (fptr, 0, SEEK_END); + int length = ftell (fptr); + fseek (fptr, 0, SEEK_SET); + jsonString = (char*)malloc (length + 1); + if(jsonString) { + fread(jsonString, 1, length, fptr); + ALOGV("file content %s\n", jsonString); + jsonString[length] = '\0'; + parseString = strstr(jsonString, "camera_config"); + char rotFrontValue[128]; + char rotBackValue[128]; + if(parseString) { + char *frontString = strstr(parseString, "front_preview_rotation"); + if(frontString != NULL) { + frontString = strstr(frontString, ":"); + int i = 1; + char separator = ','; + while (frontString[i] != '\0') { + if (frontString[i] != separator) { + rotFrontValue[i-1] = frontString[i]; + } else { + rotFrontValue[i-1] = '\0'; + break; + } + i++; + } + ALOGV("front rotation %s\n",rotFrontValue); + } + + char *backString = strstr(parseString, "back_preview_rotation"); + if(backString != NULL) { + backString = strstr(backString, ":"); + int i = 1; + char separator = '}'; + while (backString[i] != '\0') { + if (backString[i] != separator) { + rotBackValue[i-1] = backString[i]; + } else { + rotBackValue[i-1] = '\0'; + break; + } + i++; + } + ALOGV("back rotation %s\n",rotBackValue); + } + } + *frontRot = atoi(rotFrontValue); + *backRot = atoi(rotBackValue); + } + free(jsonString); + fclose(fptr); + } else { + ALOGE("Fail to open folder \n"); + } +return 0; +} + + + //#define CROP_ROTATE -#ifdef CROP_ROTATE +#ifdef CROP_ROTATE_1 void bufferCropAndRotate(unsigned char * buff, unsigned char * buff_out){ // // Original frame Cropped frame Rotated frame Upscale frame @@ -320,6 +396,114 @@ void bufferCropAndRotate(unsigned char * buff, unsigned char * buff_out){ char buffer_recv[640*480*3/2]; #endif + +#define CROP_ROTATE +#ifdef CROP_ROTATE + +void bufferCropAndRotate(unsigned char * buff, unsigned char * buff_out, int height, int width){ +// +// Original frame Cropped frame Rotated frame Upscale frame +// 
-------------------- -------- -------------------- +// | | | | | | --------------- | | | | +// | | | | | | | | | | | | +// | | | | =======>> | | =======>> | | =======>> | | | | +// | | | | | | --------------- | | | | +// | | | | | | | | | | +// -------------------- -------- -------------------- +// 640x480 360x480 480x360 640x480 + ALOGI("bufferCropAndRotate"); + + std::unique_ptr cropped_buffer; + + int cropped_width = height; + int cropped_height = height; +if(height==480) +cropped_width=360; + int margin = (width-cropped_width)/2; //460 + + int rotated_height = cropped_width; + int rotated_width = cropped_height; + + int rotated_y_stride = rotated_width; + int rotated_uv_stride = rotated_width / 2; + + size_t rotated_size = + rotated_y_stride * rotated_height + rotated_uv_stride * rotated_height; + cropped_buffer.reset(new uint8_t[rotated_size]); + uint8_t* rotated_y_plane = cropped_buffer.get(); + uint8_t* rotated_u_plane = + rotated_y_plane + rotated_y_stride * rotated_height; + uint8_t* rotated_v_plane = + rotated_u_plane + rotated_uv_stride * rotated_height / 2; + int frontRot = -1, backRot = -1; + getRotation(&frontRot, &backRot); + int finalRot=0; + if(frontRot != -1) + finalRot=frontRot; + else + finalRot = backRot; + libyuv::RotationMode rotation_mode = libyuv::RotationMode::kRotate0; + + switch(finalRot){ + + case 0: + rotation_mode = libyuv::RotationMode::kRotate0; + break; + case 90: + rotation_mode = libyuv::RotationMode::kRotate90; + break; + case 180: + rotation_mode = libyuv::RotationMode::kRotate180; + break; + case 270: + rotation_mode = libyuv::RotationMode::kRotate270; + break; + +} + //libyuv::RotationMode rotation_mode = libyuv::RotationMode::kRotate90; + //libyuv::RotationMode rotation_mode = libyuv::RotationMode::kRotate270; + + ALOGE("%s Calling ConvertToI420",__FUNCTION__); + + int res = libyuv::ConvertToI420( + buff, width*height*3/2, rotated_y_plane, + rotated_y_stride, rotated_u_plane, rotated_uv_stride, rotated_v_plane, + 
rotated_uv_stride, margin, 0, width, + height, cropped_width, cropped_height, rotation_mode, + libyuv::FourCC::FOURCC_I420); + + if(res){ + ALOGE("%s Res True Return",__FUNCTION__); + + ALOGE("critical ConvertToI420 res:%d ", res); + return; + } + + ALOGE("%s Calling I420 Scale",__FUNCTION__); + + res = libyuv::I420Scale( + rotated_y_plane, rotated_y_stride, rotated_u_plane, rotated_uv_stride, + rotated_v_plane, rotated_uv_stride, rotated_width, rotated_height, + buff_out, width, + buff_out + width*height, + width / 2, + buff_out + width*height*5/4, + width/2, width, + height, libyuv::FilterMode::kFilterNone); + + if(res){ + ALOGE("%s Res true Scale",__FUNCTION__); + + ALOGE("critical I420Scale res:%d ", res); + } + +} + +char buffer_recv[1280*720*3/2]; + +#endif + + bool Sensor::threadLoop() { /** * Sensor capture operation main loop. @@ -415,7 +599,10 @@ bool Sensor::threadLoop() { ClientVideoBuffer *handle = ClientVideoBuffer::getClientInstance(); handle->clientBuf[handle->clientRevCount % 1].decoded = false; #ifdef CROP_ROTATE - char *fbuffer = (char *)handle->clientBuf[handle->clientRevCount % 1].buffer; + char prop_val[PROPERTY_VALUE_MAX] = {'\0'}; + property_get("vendor.camera.app.name", prop_val, "false"); + ALOGI("%s APP Name : %s",__FUNCTION__,prop_val); + char *fbuffer = (char *)handle->clientBuf[handle->clientRevCount % 1].buffer; bufferCropAndRotate((uint8_t*)fbuffer, (uint8_t*)buffer_recv); #endif @@ -893,6 +1080,10 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h // For I420 input support ALOGVV(LOG_TAG " %s: I420 no scaling required Size = %dx%d", __FUNCTION__, width, height); + +#ifdef USE_PIPE + memcpy(img, bufData, width * height * 1.5); +#else const uint8_t *src_y = bufData; int src_stride_y = mSrcWidth; const uint8_t *src_u = bufData + src_size; @@ -909,6 +1100,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv, width, 
height)) { } +#endif } else { ALOGVV(LOG_TAG " %s: [NON-SG1] convert I420 to NV21!", __FUNCTION__); if (int ret = libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, @@ -965,7 +1157,9 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h uint8_t *dst_uv = dst_y + width * height; int dst_stride_uv = width; - +#ifdef USE_PIPE + memcpy(img, mDstBuf.data(), width * height * 1.5); +#else if (m_major_version == 1) { ALOGVV(LOG_TAG " %s: [SG1] convert I420 to NV12!", __FUNCTION__); if (int ret = libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, @@ -979,6 +1173,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h dst_stride_uv, width, height)) { } } +#endif } else { // For NV12 Input support ALOGVV(LOG_TAG " %s: NV12 frame with scaling to Size = %dx%d", __FUNCTION__, width, @@ -1056,11 +1251,11 @@ void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h ClientVideoBuffer *handle = ClientVideoBuffer::getClientInstance(); //uint8_t *bufData = handle->clientBuf[handle->clientRevCount % 1].buffer; - // #ifdef CROP_ROTATE - // uint8_t *bufData = (uint8_t *)buffer_recv; - // #else + #ifdef CROP_ROTATE + uint8_t *bufData = (uint8_t *)buffer_recv; + #else uint8_t *bufData = handle->clientBuf[handle->clientRevCount % 1].buffer; - //#endif + #endif int src_size = mSrcWidth * mSrcHeight; int dstFrameSize = width * height;