From 5983055d7087320d2227e74be86f63645d7c8a88 Mon Sep 17 00:00:00 2001 From: gkdeepa Date: Wed, 13 Apr 2022 21:08:52 +0530 Subject: [PATCH] Delete 0001-CIV-changes-on-VHAL-on-top-of-opensource.patch --- ...changes-on-VHAL-on-top-of-opensource.patch | 1454 ----------------- 1 file changed, 1454 deletions(-) delete mode 100644 bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-CIV-changes-on-VHAL-on-top-of-opensource.patch diff --git a/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-CIV-changes-on-VHAL-on-top-of-opensource.patch b/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-CIV-changes-on-VHAL-on-top-of-opensource.patch deleted file mode 100644 index df412f989b..0000000000 --- a/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-CIV-changes-on-VHAL-on-top-of-opensource.patch +++ /dev/null @@ -1,1454 +0,0 @@ -From 2bef2496286b74667a4f9ce831dc9d6a0995c8b8 Mon Sep 17 00:00:00 2001 -From: gkdeepa -Date: Tue, 12 Oct 2021 15:06:42 +0530 -Subject: [PATCH] CIV changes on VHAL on top of opensource - ---- - Android.mk | 44 +++-- - include/.CameraSocketServerThread.h.swp | Bin 0 -> 12288 bytes - include/CameraSocketCommand.h | 2 +- - include/CameraSocketServerThread.h | 19 +++ - include/GrallocModule.h | 80 ++++++++- - include/VirtualCameraFactory.h | 17 +- - include/VirtualFakeCamera3.h | 12 +- - include/fake-pipeline2/Sensor.h | 10 +- - src/CGCodec.cpp | 7 - - src/CameraSocketServerThread.cpp | 213 +++++++++++++++++++----- - src/NV21JpegCompressor.cpp | 14 +- - src/VirtualCamera3.cpp | 1 - - src/VirtualCameraFactory.cpp | 56 +++++-- - src/VirtualFakeCamera3.cpp | 88 ++++++++-- - src/fake-pipeline2/Sensor.cpp | 30 ++-- - 15 files changed, 465 insertions(+), 128 deletions(-) - create mode 100644 include/.CameraSocketServerThread.h.swp - -diff --git a/Android.mk b/Android.mk -index 8acc35a..7d3224b 100644 ---- a/Android.mk -+++ b/Android.mk -@@ -12,11 +12,12 @@ - # See the License for the specific language governing permissions and - # limitations under the License. 
- --ifeq ($(TARGET_USE_CAMERA_VHAL), true) -+#ifeq ($(TARGET_USE_CAMERA_VHAL), true) - LOCAL_PATH := $(call my-dir) - - include $(CLEAR_VARS) - -+ifneq ($(TARGET_BOARD_PLATFORM), celadon) - ####### Build FFmpeg modules from prebuilt libs ######### - - FFMPEG_PREBUILD := prebuilts/ffmpeg-4.2.2/android-x86_64 -@@ -85,12 +86,18 @@ LOCAL_MODULE_SUFFIX := .so - LOCAL_MODULE_CLASS := SHARED_LIBRARIES - include $(BUILD_PREBUILT) - ########################################################## -+endif - - include $(CLEAR_VARS) - - ##################### Build camera-vhal ####################### - -+ifeq ($(TARGET_BOARD_PLATFORM), celadon) -+LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM) -+else - LOCAL_MODULE := camera.$(TARGET_PRODUCT) -+endif -+ - LOCAL_MULTILIB := 64 - LOCAL_VENDOR_MODULE := true - -@@ -108,9 +115,10 @@ camera_vhal_src := \ - src/Exif.cpp \ - src/Thumbnail.cpp \ - src/CameraSocketServerThread.cpp \ -- src/CameraSocketCommand.cpp \ -- src/CGCodec.cpp -- -+ src/CameraSocketCommand.cpp -+ifneq ($(TARGET_BOARD_PLATFORM), celadon) -+camera_vhal_src += src/CGCodec.cpp -+endif - camera_vhal_c_includes := external/libjpeg-turbo \ - external/libexif \ - external/libyuv/files/include \ -@@ -122,6 +130,9 @@ camera_vhal_c_includes := external/libjpeg-turbo \ - $(LOCAL_PATH)/$(FFMPEG_PREBUILD)/include \ - $(call include-path-for, camera) - -+ifeq ($(TARGET_BOARD_PLATFORM), celadon) -+camera_vhal_c_includes += $(INTEL_MINIGBM)/cros_gralloc -+endif - camera_vhal_shared_libraries := \ - libbinder \ - libexif \ -@@ -136,14 +147,17 @@ camera_vhal_shared_libraries := \ - libjpeg \ - libcamera_metadata \ - libhardware \ -- libsync \ -- libavcodec \ -+ libsync -+ -+ifneq ($(TARGET_BOARD_PLATFORM), celadon) -+camera_vhal_shared_libraries += libavcodec \ - libavdevice \ - libavfilter \ - libavformat \ - libavutil \ - libswresample \ -- libswscale -+ libswscale -+endif - - camera_vhal_static_libraries := \ - android.hardware.camera.common@1.0-helper \ -@@ -158,6 +172,12 @@ ifeq ($(BOARD_USES_GRALLOC1), true) - camera_vhal_cflags += -DUSE_GRALLOC1 - endif - -+ifeq ($(TARGET_BOARD_PLATFORM), celadon) -+camera_vhal_cflags += -DGRALLOC_MAPPER4 -+else -+camera_vhal_cflags += -DENABLE_FFMPEG -+endif -+ - LOCAL_MODULE_RELATIVE_PATH := ${camera_vhal_module_relative_path} - LOCAL_CFLAGS := ${camera_vhal_cflags} - LOCAL_CPPFLAGS += -std=c++17 -@@ -200,8 +220,8 @@ jpeg_shared_libraries := \ - jpeg_c_includes := external/libjpeg-turbo \ - external/libexif \ - frameworks/native/include \ -- $(LOCAL_PATH)/include \ -- $(LOCAL_PATH)/include/jpeg-stub \ -+ $(LOCAL_PATH)/include \ -+ $(LOCAL_PATH)/include/jpeg-stub \ - - jpeg_src := \ - src/jpeg-stub/Compressor.cpp \ -@@ -217,10 +237,14 @@ LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries} - LOCAL_C_INCLUDES += ${jpeg_c_includes} - LOCAL_SRC_FILES := ${jpeg_src} - -+ifeq ($(TARGET_BOARD_PLATFORM), celadon) -+LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM).jpeg -+else - LOCAL_MODULE := camera.$(TARGET_PRODUCT).jpeg -+endif - - include $(BUILD_SHARED_LIBRARY) - - ###################################################### - --endif # TARGET_USE_CAMERA_VHAL -+#endif # TARGET_USE_CAMERA_VHAL -diff --git a/include/.CameraSocketServerThread.h.swp b/include/.CameraSocketServerThread.h.swp -new file mode 100644 -index 0000000000000000000000000000000000000000..bc725330e470ac85f9824515330c767b883f22fe -GIT binary patch -literal 12288 -zcmeI2&u<$=6vrn>{Du|^7jBP4qG?FIN!muiNh@5}X|3i*6+5Xonepz}9=gAnnT=ym -zL0k|=ehKvg-1!HPxFI1Sj!1|fQmK$QafK5HB)&821c$U06({Jf^og@OKi-?~%)aL+ 
-zuRXI>U!{f848!#pV?X`;_KR29p?ki1a*TDa1VW_Tw%qoTahSEu%XqT=*xx>fx5Y=HK%qdPK%qdPK%qdPK%qdPK%qdPK%qdPK%u~Y -zPyrq=_RYPF(fx29|Nr0p`~Q!_jC~2d0B?ZP;0QPjt{-CT2ke-fMZ}190lJ)pAUfm -zRq#BR1V@3@?z{W`gPUKtTqsZ|P$=;KQ(!{Gf$T7LmB{lbM#-2?QCddBsxx+Vkr6H} -zUlA=sQC>}UHtTeRYRHMv>)%ZDilEapGc;v_a+xc|2TK`B-4GqD(zwQeE~ulO&-oNntvrM0LHMi9%0x{Q231lM^ZLisQ2<-0Cni -z;yW_RBDYXVI>hP1-26#`7O0*h8AFf(@ora%E;llX_gXZ~pE`Bo6txFN=n@Wylgq8a -zB$4!&v@5_XmxtweLTe0f7;eNFdL}yfbVLP>nm-Y=Qlplt=eJ}al5@gO0->P7=3pb4W)?9asXwH&yLPzl#IK(>RsZdmp8gWqeX)IhPI&~MH^WhWALIoBi#&zh<2(Z -z4hPiliI}80s!2v^q709XQ(!eSVuvF4ksBHoDUzGgxJty8BuOWaQG(J$h1Kovry)_M -zap-K#;-HzpiOFM&ELCzF+LfIZ_hTE?k>rPhtNdI}`jCyL+U}4}WA6<0Wng;wuRS5V -zJ%fn|*MXtK=XOk2m|ZQcUDrLYv!oH%Ue1IzwH;|Had4SS^Y^aKf)ab(Y`dH5wOf^E -z+8)EV(%QUbH*L?D&UaN~t#W4?j@ftTADlb80^tmWS~!2KEqmSZ6n4;^J9(R<9pq_FEc;nTHn$zl_~E?dCe=mIvlW-Z(;86GWSiQ -z2H24Ufe!;vrBKG%j<+4P@|RhcdT0J>9UonNHHjj`&QfoDcN6z!8LytN;^p;S -z+s&4?*K^5*zY!Bl`JRqGi9`g~N1x5Ev4lI_tDV)PkxlDv;jg)ckOr;x{pYh6KI8;oqQ -zqg1F!x?rtsHnh`Rt~W2NZ#L<2Wn-hV)~weWw7x;r^|f>LW_^7Pcgs{+dx0+2*Un9o -zkZ?nh*hv+FKEWhHB%*WRTneP!uQtMDaL{RznnpVy-|(2mD-EhQ#_4ROQEyCJhc@HT -zPHp3hM5aIzBA&vA&)cEEqWabnNY&kimu`{{L=1ru{e!pMh -z$b3EF(k7;}LTYcNUahS)YRBhL1fP$3A<2+A2b8o8w>gshB*k54pV%B3VTuOBu5n^0 -z({?+s#*T!WV#gP$%dvi91>!NjfAk^V=qygN;g*gHR&jprZuT^731#yu!AF|3$6$Oo -wXxzqdx6~?pR5l6FWc3)nTFjn6kPw6bZtLu6Jw-;#ze&&~mAr(vhj~-%PX$(B#sB~S - -literal 0 -HcmV?d00001 - -diff --git a/include/CameraSocketCommand.h b/include/CameraSocketCommand.h -index 93d63ef..07b835f 100644 ---- a/include/CameraSocketCommand.h -+++ b/include/CameraSocketCommand.h -@@ -32,7 +32,7 @@ namespace android { - - namespace socket { - --enum class VideoCodecType { kH264 = 0 }; -+enum class VideoCodecType { kH264 = 0, kI420 }; - enum class FrameResolution { k480p = 0, k720p, k1080p }; - - struct CameraFrameInfo { -diff --git a/include/CameraSocketServerThread.h b/include/CameraSocketServerThread.h -index cf376d5..9573a2a 100644 ---- a/include/CameraSocketServerThread.h -+++ b/include/CameraSocketServerThread.h -@@ -28,16 +28,31 @@ - #include - #include - #include -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" -+#endif - #include "CameraSocketCommand.h" -+#include - - namespace android { - -+enum tranSock -+{ -+ UNIX = 0, -+ TCP = 1, -+ VSOCK = 2, -+}; -+ - class VirtualCameraFactory; - class CameraSocketServerThread : public Thread { - public: -+#ifdef ENABLE_FFMPEG - CameraSocketServerThread(std::string suffix, std::shared_ptr decoder, - std::atomic &state); -+#else -+ CameraSocketServerThread(std::string suffix, -+ std::atomic &state); -+#endif - ~CameraSocketServerThread(); - - virtual void requestExit(); -@@ -55,10 +70,14 @@ private: - int mSocketServerFd = -1; - std::string mSocketPath; - int mClientFd = -1; -+ ssize_t size_update = 0; - -+#ifdef ENABLE_FFMPEG - std::shared_ptr mVideoDecoder; -+#endif - std::atomic &mCameraSessionState; - -+ - // maximum size of a H264 packet in any aggregation packet is 65535 bytes. - // Source: https://tools.ietf.org/html/rfc6184#page-13 - std::array mSocketBuffer = {}; -diff --git a/include/GrallocModule.h b/include/GrallocModule.h -index ca9cab7..04c55a0 100644 ---- a/include/GrallocModule.h -+++ b/include/GrallocModule.h -@@ -2,21 +2,17 @@ - #define EMU_CAMERA_GRALLOC_MODULE_H - - //#define LOG_NDEBUG 0 --#undef ALOGVV --#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 --#define ALOGVV ALOGV --#else --#define ALOGVV(...) 
((void)0) --#endif -- - #include - #include - #include - - #ifdef USE_GRALLOC1 - #include -+#include "i915_private_android_types.h" -+#ifndef GRALLOC_MAPPER4 - #include - #endif -+#endif - - class GrallocModule { - public: -@@ -110,8 +106,10 @@ public: - int32_t fenceFd = -1; - int error = m_gralloc1_unlock(m_gralloc1_device, handle, &fenceFd); - if (!error) { -+#ifndef GRALLOC_MAPPER4 - sync_wait(fenceFd, -1); - close(fenceFd); -+#endif - } - return error; - } -@@ -125,7 +123,59 @@ public: - } - } - } -- -+#ifdef GRALLOC_MAPPER4 -+ int importBuffer(buffer_handle_t handle, buffer_handle_t *outBuffer) { -+ switch (m_major_version) { -+ case 1: -+#ifdef USE_GRALLOC1 -+ { -+ return m_gralloc1_importbuffer(m_gralloc1_device, handle, outBuffer); -+ } -+#endif -+ default: { -+ ALOGE( -+ "[Gralloc] no gralloc module to import; unknown gralloc major " -+ "version (%d)", -+ m_major_version); -+ return -1; -+ } -+ } -+ } -+ int freeBuffer(void *Buffer) { -+ switch (m_major_version) { -+ case 1: -+#ifdef USE_GRALLOC1 -+ { -+ return m_gralloc1_freebuffer(m_gralloc1_device, Buffer); -+ } -+#endif -+ default: { -+ ALOGE( -+ "[Gralloc] no gralloc module to free; unknown gralloc major " -+ "version (%d)", -+ m_major_version); -+ return -1; -+ } -+ } -+ } -+ int release_handle(buffer_handle_t handle ) { -+ switch (m_major_version) { -+ case 1: -+#ifdef USE_GRALLOC1 -+ { -+ return m_gralloc1_release(m_gralloc1_device,handle); -+ } -+#endif -+ default: { -+ ALOGE( -+ "[Gralloc] no gralloc module to free; unknown gralloc major " -+ "version (%d)", -+ m_major_version); -+ return -1; -+ } -+ } -+ } -+#endif - private: - GrallocModule() { - const hw_module_t *module = nullptr; -@@ -133,7 +183,6 @@ private: - if (ret) { - ALOGE("%s: Failed to get gralloc module: %d", __FUNCTION__, ret); - } -- - mModule = nullptr; - m_major_version = (module->module_api_version >> 8) & 0xff; - switch (m_major_version) { -@@ -152,6 +201,14 @@ private: - m_gralloc1_getNumFlexPlanes = - (GRALLOC1_PFN_GET_NUM_FLEX_PLANES)m_gralloc1_device->getFunction( - m_gralloc1_device, GRALLOC1_FUNCTION_GET_NUM_FLEX_PLANES); -+#ifdef GRALLOC_MAPPER4 -+ m_gralloc1_importbuffer = (GRALLOC1_PFN_IMPORT_BUFFER)m_gralloc1_device->getFunction( -+ m_gralloc1_device, GRALLOC1_FUNCTION_IMPORT_BUFFER); -+ m_gralloc1_freebuffer = (GRALLOC1_PFN_FREE_BUFFER)m_gralloc1_device->getFunction( -+ m_gralloc1_device, GRALLOC1_FUNCTION_FREE_BUFFER); -+ m_gralloc1_release = (GRALLOC1_PFN_RELEASE)m_gralloc1_device->getFunction( -+ m_gralloc1_device, GRALLOC1_FUNCTION_RELEASE); -+#endif - break; - #endif - default: -@@ -167,6 +224,11 @@ private: - GRALLOC1_PFN_UNLOCK m_gralloc1_unlock = nullptr; - GRALLOC1_PFN_LOCK_FLEX m_gralloc1_lockflex = nullptr; - GRALLOC1_PFN_GET_NUM_FLEX_PLANES m_gralloc1_getNumFlexPlanes = nullptr; -+#ifdef GRALLOC_MAPPER4 -+ GRALLOC1_PFN_IMPORT_BUFFER m_gralloc1_importbuffer=nullptr; -+ GRALLOC1_PFN_FREE_BUFFER m_gralloc1_freebuffer=nullptr; -+ GRALLOC1_PFN_RELEASE m_gralloc1_release=nullptr; -+#endif - #endif - }; - -diff --git a/include/VirtualCameraFactory.h b/include/VirtualCameraFactory.h -index a5b388d..acb2756 100644 ---- a/include/VirtualCameraFactory.h -+++ b/include/VirtualCameraFactory.h -@@ -26,8 +26,9 @@ - #include - #include - #include "CameraSocketServerThread.h" -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" -- -+#endif - namespace android { - - class CameraSocketServerThread; -@@ -178,8 +179,13 @@ private: - * true, it will be created as if it were a camera on the back of the phone. - * Otherwise, it will be front-facing. 
- */ -+#ifdef ENABLE_FFMPEG - void createFakeCamera(std::shared_ptr socket_server, - std::shared_ptr decoder, bool backCamera); -+#else -+ void createFakeCamera(std::shared_ptr socket_server, -+ bool backCamera); -+#endif - /* - * Waits till remote-props has done setup, timeout after 500ms. - */ -@@ -223,13 +229,16 @@ public: - static struct hw_module_methods_t mCameraModuleMethods; - - private: -- // NV12 Decoder -- std::shared_ptr mDecoder; - - // Socket server - std::shared_ptr mSocketServer; -- -+#ifdef ENABLE_FFMPEG -+ // NV12 Decoder -+ std::shared_ptr mDecoder; - bool createSocketServer(std::shared_ptr decoder); -+#else -+ bool createSocketServer(); -+#endif - }; - - }; // end of namespace android -diff --git a/include/VirtualFakeCamera3.h b/include/VirtualFakeCamera3.h -index ec45e00..0b17f8a 100644 ---- a/include/VirtualFakeCamera3.h -+++ b/include/VirtualFakeCamera3.h -@@ -33,7 +33,9 @@ - #include - #include - #include -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" -+#endif - #include "CameraSocketServerThread.h" - #include "CameraSocketCommand.h" - -@@ -52,11 +54,16 @@ namespace android { - */ - class VirtualFakeCamera3 : public VirtualCamera3, private Sensor::SensorListener { - public: -+#ifdef ENABLE_FFMPEG - VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, - std::shared_ptr socket_server, - std::shared_ptr decoder, - std::atomic &state); -- -+#else -+ VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, -+ std::shared_ptr socket_server, -+ std::atomic &state); -+#endif - virtual ~VirtualFakeCamera3(); - - /**************************************************************************** -@@ -194,9 +201,10 @@ private: - - // socket server - std::shared_ptr mSocketServer; -+#ifdef ENABLE_FFMPEG - // NV12 Video decoder handle - std::shared_ptr mDecoder = nullptr; -- -+#endif - std::atomic &mCameraSessionState; - - bool createSocketServer(bool facing_back); -diff --git a/include/fake-pipeline2/Sensor.h b/include/fake-pipeline2/Sensor.h -index ca198c5..b83e771 100644 ---- a/include/fake-pipeline2/Sensor.h -+++ b/include/fake-pipeline2/Sensor.h -@@ -78,8 +78,10 @@ - #include "utils/Thread.h" - #include "utils/Mutex.h" - #include "utils/Timers.h" -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" - #include "CGLog.h" -+#endif - #include - #include - #include -@@ -98,7 +100,11 @@ class Sensor : private Thread, public virtual RefBase { - public: - // width: Width of pixel array - // height: Height of pixel array -+#ifdef ENABLE_FFMPEG - Sensor(uint32_t width, uint32_t height, std::shared_ptr decoder = nullptr); -+#else -+ Sensor(uint32_t width, uint32_t height); -+#endif - ~Sensor(); - - /* -@@ -270,9 +276,9 @@ private: - // vHAL buffer - int mSrcWidth = 640; - int mSrcHeight = 480; -- -+#ifdef ENABLE_FFMPEG - std::shared_ptr mDecoder = {}; -- -+#endif - bool getNV12Frames(uint8_t *out_buf, int *out_size, std::chrono::milliseconds timeout_ms = 5ms); - void dump_yuv(uint8_t *img1, size_t img1_size, uint8_t *img2, size_t img2_size, - const std::string &filename); -diff --git a/src/CGCodec.cpp b/src/CGCodec.cpp -index af090a7..ed78d9b 100644 ---- a/src/CGCodec.cpp -+++ b/src/CGCodec.cpp -@@ -25,7 +25,6 @@ - #endif - - #define MAX_DEVICE_NAME_SIZE 21 --#define MAX_ALLOWED_PENDING_FRAMES 2 - - #include - #include -@@ -409,12 +408,6 @@ int CGVideoDecoder::get_decoded_frame(CGVideoFrame::Ptr cg_frame) { - if (m_decode_ctx->decoded_frames.empty()) - return -1; - -- while (m_decode_ctx->decoded_frames.size() > MAX_ALLOWED_PENDING_FRAMES) { -- auto it 
= m_decode_ctx->decoded_frames.begin(); -- AVFrame *frame = *it; -- av_frame_free(&frame); -- m_decode_ctx->decoded_frames.erase(it); -- } - // return the frame in the front - auto it = m_decode_ctx->decoded_frames.begin(); - AVFrame *frame = *it; -diff --git a/src/CameraSocketServerThread.cpp b/src/CameraSocketServerThread.cpp -index 116580c..62a46e4 100644 ---- a/src/CameraSocketServerThread.cpp -+++ b/src/CameraSocketServerThread.cpp -@@ -17,20 +17,17 @@ - //#define LOG_NNDEBUG 0 - #define LOG_TAG "CameraSocketServerThread: " - #include -- - #ifdef LOG_NNDEBUG - #define ALOGVV(...) ALOGV(__VA_ARGS__) - #else - #define ALOGVV(...) ((void)0) - #endif -- - #include - #include - #include - #include - #include - #include -- - #include - #include - #include -@@ -42,18 +39,25 @@ - #include "CameraSocketServerThread.h" - #include "VirtualBuffer.h" - #include "VirtualCameraFactory.h" -+#include - #include - - android::ClientVideoBuffer *android::ClientVideoBuffer::ic_instance = 0; - - namespace android { -- - using namespace socket; -+#ifdef ENABLE_FFMPEG - CameraSocketServerThread::CameraSocketServerThread(std::string suffix, - std::shared_ptr decoder, - std::atomic &state) - : Thread(/*canCallJava*/ false), mRunning{true}, mSocketServerFd{-1}, - mVideoDecoder{decoder}, mCameraSessionState{state} { -+#else -+CameraSocketServerThread::CameraSocketServerThread(std::string suffix, -+ std::atomic &state) -+ : Thread(/*canCallJava*/ false), mRunning{true}, mSocketServerFd{-1}, -+ mCameraSessionState{state} { -+#endif - std::string sock_path = "/ipc/camera-socket" + suffix; - char *k8s_env_value = getenv("K8S_ENV"); - mSocketPath = (k8s_env_value != NULL && !strcmp(k8s_env_value, "true")) -@@ -119,20 +123,49 @@ void CameraSocketServerThread::clearBuffer(char *buffer, int width, int height) - } - - bool CameraSocketServerThread::threadLoop() { -- mSocketServerFd = ::socket(AF_UNIX, SOCK_STREAM, 0); -- if (mSocketServerFd < 0) { -- ALOGE("%s:%d Fail to construct camera socket with error: %s", __FUNCTION__, __LINE__, -- strerror(errno)); -- return false; -- } -- - struct sockaddr_un addr_un; - memset(&addr_un, 0, sizeof(addr_un)); - addr_un.sun_family = AF_UNIX; -- strncpy(&addr_un.sun_path[0], mSocketPath.c_str(), strlen(mSocketPath.c_str())); -- - int ret = 0; -- if ((access(mSocketPath.c_str(), F_OK)) != -1) { -+ int new_client_fd =-1; -+ int so_reuseaddr = 1; -+ struct sockaddr_vm addr_vm ; -+ struct sockaddr_in addr_ip; -+ int trans_mode = 0; -+ char mode[PROPERTY_VALUE_MAX]; -+ -+ if ((property_get("ro.vendor.camera.transference", mode, nullptr) > 0) ){ -+ if (!strcmp(mode, "TCP")) { -+ trans_mode = TCP; -+ }else if (!strcmp(mode, "UNIX")) { -+ trans_mode = UNIX; -+ }else if (!strcmp(mode, "VSOCK")) { -+ trans_mode = VSOCK; -+ } -+ } -+ else{ -+ //Fall back to unix socket by default -+ //trans_mode = UNIX; -+ //TODO -+ trans_mode = VSOCK; -+ ALOGVV("%s: falling back to UNIX as the trans mode is not set",__FUNCTION__); -+ } -+ if(trans_mode == UNIX) -+ { -+ mSocketServerFd = ::socket(AF_UNIX, SOCK_STREAM, 0); -+ if (mSocketServerFd < 0) { -+ ALOGE("%s:%d Fail to construct camera socket with error: %s", __FUNCTION__, __LINE__, -+ strerror(errno)); -+ return false; -+ } -+ -+ struct sockaddr_un addr_un; -+ memset(&addr_un, 0, sizeof(addr_un)); -+ addr_un.sun_family = AF_UNIX; -+ strncpy(&addr_un.sun_path[0], mSocketPath.c_str(), strlen(mSocketPath.c_str())); -+ -+ int ret = 0; -+ if ((access(mSocketPath.c_str(), F_OK)) != -1) { - ALOGI(" %s camera socket server file is %s", __FUNCTION__, 
mSocketPath.c_str()); - ret = unlink(mSocketPath.c_str()); - if (ret < 0) { -@@ -140,43 +173,117 @@ bool CameraSocketServerThread::threadLoop() { - mSocketPath.c_str(), ret, strerror(errno)); - return false; - } -- } else { -- ALOGV(LOG_TAG " %s camera socket server file %s will created. ", __FUNCTION__, -+ } else { -+ ALOGV(LOG_TAG " %s camera socket server file %s will created. ", __FUNCTION__, - mSocketPath.c_str()); -- } -+ } - -- ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_un, -+ ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_un, - sizeof(sa_family_t) + strlen(mSocketPath.c_str()) + 1); -- if (ret < 0) { -- ALOGE(LOG_TAG " %s Failed to bind %s address %d, %s", __FUNCTION__, mSocketPath.c_str(), -+ if (ret < 0) { -+ ALOGE(LOG_TAG " %s Failed to bind %s address %d, %s", __FUNCTION__, mSocketPath.c_str(), - ret, strerror(errno)); -- return false; -- } -+ return false; -+ } -+ struct stat st; -+ __mode_t mod = S_IRWXU | S_IRWXG | S_IRWXO; -+ if (fstat(mSocketServerFd, &st) == 0) { -+ mod |= st.st_mode; -+ } -+ chmod(mSocketPath.c_str(), mod); -+ stat(mSocketPath.c_str(), &st); - -- struct stat st; -- __mode_t mod = S_IRWXU | S_IRWXG | S_IRWXO; -- if (fstat(mSocketServerFd, &st) == 0) { -- mod |= st.st_mode; -+ ret = listen(mSocketServerFd, 5); -+ if (ret < 0) { -+ ALOGE("%s Failed to listen on %s", __FUNCTION__, mSocketPath.c_str()); -+ return false; -+ } - } -- chmod(mSocketPath.c_str(), mod); -- stat(mSocketPath.c_str(), &st); -+ else if(trans_mode == TCP){ -+ int ret = 0; -+ int new_client_fd =-1; -+ int port = 8085; -+ int so_reuseaddr = 1; -+ -+ mSocketServerFd = ::socket(AF_INET, SOCK_STREAM, 0); -+ if (mSocketServerFd < 0) { -+ ALOGE(LOG_TAG " %s:Line:[%d] Fail to construct camera socket with error: [%s]", -+ __FUNCTION__, __LINE__, strerror(errno)); -+ return false; -+ } -+ if (setsockopt(mSocketServerFd, SOL_SOCKET, SO_REUSEADDR, &so_reuseaddr, -+ sizeof(int)) < 0) { -+ ALOGE(LOG_TAG " %s setsockopt(SO_REUSEADDR) failed. : %d\n", __func__, -+ mSocketServerFd); -+ return false; -+ } -+ addr_ip.sin_family = AF_INET; -+ addr_ip.sin_addr.s_addr = htonl(INADDR_ANY); -+ addr_ip.sin_port = htons(port); - -- ret = listen(mSocketServerFd, 5); -- if (ret < 0) { -- ALOGE("%s Failed to listen on %s", __FUNCTION__, mSocketPath.c_str()); -+ ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_ip, -+ sizeof(struct sockaddr_in)); -+ if (ret < 0) { -+ ALOGE(LOG_TAG " %s Failed to bind port(%d). ret: %d, %s", __func__, port, ret, -+ strerror(errno)); -+ return false; -+ } -+ ret = listen(mSocketServerFd, 5); -+ if (ret < 0) { -+ ALOGE("%s Failed to listen on ", __FUNCTION__); -+ return false; -+ } -+ }else{ -+ memset(&addr_ip, 0, sizeof(addr_ip)); -+ addr_vm.svm_family = AF_VSOCK; -+ addr_vm.svm_port = 1982; -+ addr_vm.svm_cid = 3; -+ //addr_vm.svm_port = htons(1234); -+ //addr_vm.svm_cid = 4; -+ int ret = 0; -+ int port = 1234; -+ int so_reuseaddr = 1; -+ size_update = 0; -+ mSocketServerFd = ::socket(AF_VSOCK, SOCK_STREAM, 0); -+ if (mSocketServerFd < 0) { -+ ALOGE(LOG_TAG " %s:Line:[%d] Fail to construct camera socket with error: [%s]", -+ __FUNCTION__, __LINE__, strerror(errno)); - return false; -- } -+ } -+ ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_vm, -+ sizeof(struct sockaddr_vm)); -+ if (ret < 0) { -+ ALOGE(LOG_TAG " %s Failed to bind port(%d). 
ret: %d, %s", __func__, port, ret, -+ strerror(errno)); -+ return false; -+ } -+ ret = listen(mSocketServerFd, 32); -+ if (ret < 0) { -+ ALOGE("%s Failed to listen on ", __FUNCTION__); -+ return false; -+ } - -+ } - while (mRunning) { - ALOGI(LOG_TAG " %s: Wait for camera client to connect. . .", __FUNCTION__); - -- socklen_t alen = sizeof(struct sockaddr_un); -- -- int new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_un, &alen); -+ if (trans_mode == TCP) { -+ socklen_t alen = sizeof(struct sockaddr_in); -+ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_ip, &alen); -+ } -+ else if(trans_mode == VSOCK){ -+ socklen_t alen = sizeof(struct sockaddr_vm); -+ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_vm, &alen); -+ } -+ else -+ { -+ socklen_t alen = sizeof(struct sockaddr_un); -+ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_un, &alen); -+ } - ALOGI(LOG_TAG " %s: Accepted client: [%d]", __FUNCTION__, new_client_fd); - if (new_client_fd < 0) { -- ALOGE(LOG_TAG " %s: Fail to accept client. Error: [%s]", __FUNCTION__, strerror(errno)); -- continue; -+ ALOGE(LOG_TAG " %s: Fail to accept client. Error: [%s]", __FUNCTION__, strerror(errno)); -+ continue; - } - mClientFd = new_client_fd; - -@@ -196,7 +303,6 @@ bool CameraSocketServerThread::threadLoop() { - int ret = poll(&fd, 1, 3000); // 1 second for timeout - - event = fd.revents; // returned events -- - if (event & POLLHUP) { - // connnection disconnected => socket is closed at the other end => close the - // socket. -@@ -206,19 +312,37 @@ bool CameraSocketServerThread::threadLoop() { - mClientFd = -1; - clearBuffer(fbuffer, 640, 480); - break; -- } else if (event & POLLIN) { // preview / record -+ } else if ((event & POLLIN) || (trans_mode == VSOCK) || (trans_mode == TCP) ) { // preview / record - // data is available in socket => read data - if (gIsInFrameI420) { - ssize_t size = 0; - -- if ((size = recv(mClientFd, (char *)fbuffer, 460800, MSG_WAITALL)) > 0) { -+ //in VSOCk case the MSG_WAITALL is not helping in getting the complete buffer -+ if(trans_mode == VSOCK) -+ { -+ while(size_update != 460800){ -+ size = recv(mClientFd, (char *)fbuffer+size_update, 460800, 0); -+ size_update += size; -+ if (size_update == 460800){ -+ handle->clientRevCount++; -+ size_update = 0; -+ ALOGVV(LOG_TAG -+ "[I420] %s: Packet rev %d and " -+ "size %zd", -+ __FUNCTION__, handle->clientRevCount, size); -+ } -+ } -+ }else{ -+ if ((size = recv(mClientFd, (char *)fbuffer, 460800, MSG_WAITALL)) > 0) { - handle->clientRevCount++; - ALOGVV(LOG_TAG -- "[I420] %s: Pocket rev %d and " -+ "[I420] %s: Packet rev %d and " - "size %zd", - __FUNCTION__, handle->clientRevCount, size); -+ } - } - } else if (gIsInFrameH264) { // default H264 -+#ifdef ENABLE_FFMPEG - size_t recv_frame_size = 0; - ssize_t size = 0; - if ((size = recv(mClientFd, (char *)&recv_frame_size, sizeof(size_t), -@@ -243,6 +367,7 @@ bool CameraSocketServerThread::threadLoop() { - case CameraSessionState::kCameraOpened: - mCameraSessionState = CameraSessionState::kDecodingStarted; - ALOGVV("%s [H264] Decoding started now.", __func__); -+ [[fallthrough]]; - case CameraSessionState::kDecodingStarted: - mVideoDecoder->decode(mSocketBuffer.data(), mSocketBufferSize); - handle->clientRevCount++; -@@ -253,22 +378,24 @@ bool CameraSocketServerThread::threadLoop() { - mVideoDecoder->flush_decoder(); - mVideoDecoder->destroy(); - mCameraSessionState = CameraSessionState::kDecodingStopped; -- ALOGI("%s [H264] Decoding stopped now.", 
__func__); -+ ALOGVV("%s [H264] Decoding stopped now.", __func__); - break; - case CameraSessionState::kDecodingStopped: - ALOGVV("%s [H264] Decoding is already stopped, skip the packets", - __func__); -+ [[fallthrough]]; - default: - ALOGE("%s [H264] Invalid Camera session state!", __func__); - break; - } - } - } -+#endif - } else { - ALOGE("%s: only H264, I420 input frames supported", __FUNCTION__); - } - } else { -- // ALOGE("%s: continue polling..", __FUNCTION__); -+ ALOGE("%s: continue polling..", __FUNCTION__); - } - } - } -diff --git a/src/NV21JpegCompressor.cpp b/src/NV21JpegCompressor.cpp -index 3348959..4fb0ab7 100644 ---- a/src/NV21JpegCompressor.cpp -+++ b/src/NV21JpegCompressor.cpp -@@ -45,18 +45,20 @@ typedef void (*GetCompressedImageFunc)(JpegStub *stub, void *buff); - typedef size_t (*GetCompressedSizeFunc)(JpegStub *stub); - - NV21JpegCompressor::NV21JpegCompressor() { -- const char dlName[] = "/system/vendor/lib64/hw/camera.cic_cloud.jpeg.so"; -+ ALOGE("picking from /system/vendor/lib64/hw/camera.celadon.jpeg.so"); -+ const char dlName[] = "/system/vendor/lib64/hw/camera.celadon.jpeg.so"; - if (!mDl) { -- mDl = dlopen(dlName, RTLD_NOW); -+ mDl = dlopen(dlName, RTLD_NOW); - } - if (mDl) { - InitFunc f = (InitFunc)getSymbol(mDl, "JpegStub_init"); - if (f) -- (*f)(&mStub); -+ (*f)(&mStub); - else -- ALOGE("%s: Fatal error: getSymbol(JpegStub_init) failed", __func__); -- } else { -- ALOGE("%s: Fatal error: dlopen(%s) failed", __func__, dlName); -+ ALOGE("%s: Fatal error: getSymbol(JpegStub_init) failed", __func__); -+ } -+ else { -+ ALOGE("%s: Fatal error: dlopen(%s) failed", __func__, dlName); - } - } - -diff --git a/src/VirtualCamera3.cpp b/src/VirtualCamera3.cpp -index 587a881..d7a71df 100644 ---- a/src/VirtualCamera3.cpp -+++ b/src/VirtualCamera3.cpp -@@ -86,7 +86,6 @@ status_t VirtualCamera3::connectCamera(hw_device_t **device) { - - status_t VirtualCamera3::closeCamera() { - mStatus = STATUS_CLOSED; -- ALOGI("%s : Camera session closed successfully!!!", __FUNCTION__); - return NO_ERROR; - } - -diff --git a/src/VirtualCameraFactory.cpp b/src/VirtualCameraFactory.cpp -index ce2fecd..a5e6251 100644 ---- a/src/VirtualCameraFactory.cpp -+++ b/src/VirtualCameraFactory.cpp -@@ -19,14 +19,15 @@ - * available for emulation. - */ - --//#define LOG_NDEBUG 0 -+#define LOG_NDEBUG 0 - #define LOG_TAG "VirtualCamera_Factory" - - #include "VirtualCameraFactory.h" - #include "VirtualFakeCamera3.h" - #include "CameraSocketServerThread.h" -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" -- -+#endif - #include - #include - -@@ -49,10 +50,8 @@ void VirtualCameraFactory::readSystemProperties() { - - property_get("ro.vendor.camera.in_frame_format.h264", prop_val, "false"); - gIsInFrameH264 = !strcmp(prop_val, "true"); -- - property_get("ro.vendor.camera.in_frame_format.i420", prop_val, "false"); - gIsInFrameI420 = !strcmp(prop_val, "true"); -- - property_get("ro.vendor.camera.decode.vaapi", prop_val, "false"); - gUseVaapi = !strcmp(prop_val, "true"); - -@@ -71,7 +70,6 @@ VirtualCameraFactory::VirtualCameraFactory() - * array of virtual cameras before populating it. 
- */ - int virtualCamerasSize = 0; -- - mCameraSessionState = socket::CameraSessionState::kNone; - - waitForRemoteSfFakeCameraPropertyAvailable(); -@@ -105,19 +103,33 @@ VirtualCameraFactory::VirtualCameraFactory() - if (gIsInFrameH264) { - // create decoder - ALOGV("%s Creating decoder.", __func__); -+#ifdef ENABLE_FFMPEG - mDecoder = std::make_shared(); -+#endif - } - - // create socket server who push packets to decoder -+#ifdef ENABLE_FFMPEG - createSocketServer(mDecoder); -+#else -+ createSocketServer(); -+#endif - ALOGV("%s socket server created: ", __func__); - - // Create fake cameras, if enabled. - if (isFakeCameraEmulationOn(/* backCamera */ true)) { -+#ifdef ENABLE_FFMPEG - createFakeCamera(mSocketServer, mDecoder, /* backCamera */ true); -+#else -+ createFakeCamera(mSocketServer, /* backCamera */ true); -+#endif - } - if (isFakeCameraEmulationOn(/* backCamera */ false)) { -+#ifdef ENABLE_FFMPEG - createFakeCamera(mSocketServer, mDecoder, /* backCamera */ false); -+#else -+ createFakeCamera(mSocketServer, /* backCamera */ false); -+#endif - } - - ALOGI("%d cameras are being virtual. %d of them are fake cameras.", mVirtualCameraNum, -@@ -125,20 +137,26 @@ VirtualCameraFactory::VirtualCameraFactory() - - mConstructedOK = true; - } -- -+#ifdef ENABLE_FFMPEG - bool VirtualCameraFactory::createSocketServer(std::shared_ptr decoder) { -+#else -+bool VirtualCameraFactory::createSocketServer() { -+#endif - ALOGV("%s: E", __FUNCTION__); - - char id[PROPERTY_VALUE_MAX] = {0}; -+#ifdef ENABLE_FFMPEG - if (property_get("ro.boot.container.id", id, "") > 0) { - mSocketServer = - std::make_shared(id, decoder, std::ref(mCameraSessionState)); -- -- mSocketServer->run("FrontBackCameraSocketServerThread"); - } else - ALOGE("%s: FATAL: container id is not set!!", __func__); -- - ALOGV("%s: X", __FUNCTION__); -+#else -+ mSocketServer = -+ std::make_shared(id, std::ref(mCameraSessionState)); -+#endif -+ mSocketServer->run("FrontBackCameraSocketServerThread"); - // TODO need to return false if error. - return true; - } -@@ -171,7 +189,6 @@ int VirtualCameraFactory::cameraDeviceOpen(int cameraId, hw_device_t **device) { - ALOGI("%s: id = %d", __FUNCTION__, cameraId); - - *device = nullptr; -- - if (!isConstructedOK()) { - ALOGE("%s: VirtualCameraFactory has failed to initialize", __FUNCTION__); - return -EINVAL; -@@ -265,10 +282,14 @@ int VirtualCameraFactory::open_legacy(const struct hw_module_t *module, const ch - /******************************************************************************** - * Internal API - *******************************************************************************/ -- -+#ifdef ENABLE_FFMPEG - void VirtualCameraFactory::createFakeCamera(std::shared_ptr socket_server, - std::shared_ptr decoder, - bool backCamera) { -+#else -+void VirtualCameraFactory::createFakeCamera(std::shared_ptr socket_server, -+ bool backCamera) { -+#endif - int halVersion = getCameraHalVersion(backCamera); - - /* -@@ -282,8 +303,13 @@ void VirtualCameraFactory::createFakeCamera(std::shared_ptr 0) && -+ if ((property_get("ro.vendor.remote.sf.fake_camera", prop, nullptr) > 0) && - (!strcmp(prop, "both") || !strcmp(prop, backCamera ? "back" : "front"))) { - return true; - } else { - return false; - } -+ return true; - } - - int VirtualCameraFactory::getCameraHalVersion(bool backCamera) { -@@ -355,7 +383,7 @@ int VirtualCameraFactory::getCameraHalVersion(bool backCamera) { - * doesn't exist, it is assumed we are working with HAL v1. 
- */ - char prop[PROPERTY_VALUE_MAX]; -- const char *propQuery = backCamera ? "remote.sf.back_camera_hal" : "remote.sf.front_camera_hal"; -+ const char *propQuery = backCamera ? "ro.vendor.remote.sf.back_camera_hal" : "ro.vendor.remote.sf.front_camera_hal"; - if (property_get(propQuery, prop, nullptr) > 0) { - char *propEnd = prop; - int val = strtol(prop, &propEnd, 10); -diff --git a/src/VirtualFakeCamera3.cpp b/src/VirtualFakeCamera3.cpp -index 55cd0ac..5ad0bc0 100644 ---- a/src/VirtualFakeCamera3.cpp -+++ b/src/VirtualFakeCamera3.cpp -@@ -22,6 +22,7 @@ - #include - - //#define LOG_NNDEBUG 0 -+//#define LOG_NDEBUG 0 - #define LOG_TAG "VirtualFakeCamera3: " - #include - #include -@@ -46,12 +47,13 @@ - #else - #define ALOGVV(...) ((void)0) - #endif -- --#define MAX_TIMEOUT_FOR_CAMERA_CLOSE_SESSION 12 //12ms -- - using namespace std; - using namespace chrono; - using namespace chrono_literals; -+buffer_handle_t bufferHandle; -+buffer_handle_t bufferHandle1; -+buffer_handle_t bufferHandle2; -+buffer_handle_t bufferHandle_3; - - namespace android { - -@@ -96,7 +98,7 @@ const float VirtualFakeCamera3::kExposureWanderMax = 1; - /** - * Camera device lifecycle methods - */ -- -+#ifdef ENABLE_FFMPEG - VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, - std::shared_ptr socket_server, - std::shared_ptr decoder, -@@ -106,6 +108,15 @@ VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_ - mSocketServer(socket_server), - mDecoder(decoder), - mCameraSessionState{state} { -+#else -+VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, -+ std::shared_ptr socket_server, -+ std::atomic &state) -+ : VirtualCamera3(cameraId, module), -+ mFacingBack(facingBack), -+ mSocketServer(socket_server), -+ mCameraSessionState{state} { -+#endif - ALOGI("Constructing virtual fake camera 3: ID %d, facing %s", mCameraID, - facingBack ? "back" : "front"); - -@@ -170,6 +181,14 @@ status_t VirtualFakeCamera3::sendCommandToClient(socket::CameraOperation operati - ALOGE("%s: We're not connected to client yet!", __FUNCTION__); - return INVALID_OPERATION; - } -+ char mode[PROPERTY_VALUE_MAX]; -+ //incase vsock add yuv command -+ if ((property_get("ro.vendor.camera.transference", mode, nullptr) > 0)) -+ { -+ if (!strcmp(mode, "VSOCK")) -+ ALOGE("%s:! sending Vsock ingo!", __FUNCTION__); -+ camera_config.frame_info.codec_type = VideoCodecType::kI420; -+ } - ALOGI("%s: Camera client fd %d!", __FUNCTION__, client_fd); - if (send(client_fd, &camera_config, sizeof(camera_config), 0) < 0) { - ALOGE(LOG_TAG "%s: Failed to send Camera Open command to client, err %s ", __FUNCTION__, -@@ -189,6 +208,7 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { - - if (gIsInFrameH264) { - const char *device_name = gUseVaapi ? "vaapi" : nullptr; -+#ifdef ENABLE_FFMPEG - // initialize decoder - if (mDecoder->init(VideoCodecType::kH264, FrameResolution::k480p, device_name, 0) < 0) { - ALOGE("%s VideoDecoder init failed. %s decoding", __func__, -@@ -197,8 +217,8 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { - ALOGI("%s VideoDecoder init done. Device: %s", __func__, - !device_name ? 
"SW" : device_name); - } -+#endif - } -- - ALOGI("%s Calling sendCommandToClient", __func__); - status_t ret; - if ((ret = sendCommandToClient(socket::CameraOperation::kOpen)) != OK) { -@@ -209,7 +229,11 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { - mCameraSessionState = socket::CameraSessionState::kCameraOpened; - - // create sensor who gets decoded frames and forwards them to framework -+#ifdef ENABLE_FFMPEG - mSensor = new Sensor(mSensorWidth, mSensorHeight, mDecoder); -+#else -+ mSensor = new Sensor(mSensorWidth, mSensorHeight); -+#endif - mSensor->setSensorListener(this); - - status_t res = mSensor->startUp(); -@@ -255,7 +279,7 @@ status_t VirtualFakeCamera3::closeCamera() { - // and start video stream didnt starts properly so need wait for start - // stream. Need to be removed later once handle startPublication properly in - // remote. If NO processCaptureRequest received between open and close then wait. -- -+ - if (!mprocessCaptureRequestFlag) { - ALOGE(LOG_TAG " %s: wait:..", __FUNCTION__); - std::this_thread::sleep_for(2500ms); -@@ -299,16 +323,10 @@ status_t VirtualFakeCamera3::closeCamera() { - mCameraSessionState = socket::CameraSessionState::kCameraClosed; - - if (gIsInFrameH264) { -- int waitForCameraClose = 0; -- while (mCameraSessionState != socket::CameraSessionState::kDecodingStopped) { -+ while (mCameraSessionState != socket::CameraSessionState::kDecodingStopped) - std::this_thread::sleep_for(2ms); -- waitForCameraClose += 2; // 2 corresponds to 2ms -- if (waitForCameraClose == MAX_TIMEOUT_FOR_CAMERA_CLOSE_SESSION) -- break; -- } - ALOGI("%s Decoding is stopped, now send CLOSE command to client", __func__); - } -- - // Send close command to client - status_t ret = sendCommandToClient(socket::CameraOperation::kClose); - if (ret != OK) { -@@ -978,6 +996,7 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ - // destBuf.width = 640; - // destBuf.height = 480; - // inline with goldfish gralloc -+ - if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) { - #ifndef USE_GRALLOC1 - if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) { -@@ -1014,7 +1033,17 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ - if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { - if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) { - android_ycbcr ycbcr = android_ycbcr(); -+ bufferHandle2 = native_handle_clone(*(destBuf.buffer)); -+#ifdef GRALLOC_MAPPER4 -+ res = GrallocModule::getInstance().importBuffer(bufferHandle2, &bufferHandle1); -+ if (res!= OK) { -+ ALOGE("%s: Gralloc importBuffer failed",__FUNCTION__); -+ } -+ res = GrallocModule::getInstance().lock_ycbcr(bufferHandle2, -+#else - res = GrallocModule::getInstance().lock_ycbcr(*(destBuf.buffer), -+#endif -+ - #ifdef USE_GRALLOC1 - GRALLOC1_PRODUCER_USAGE_CPU_WRITE, - #else -@@ -1030,7 +1059,18 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ - res = INVALID_OPERATION; - } - } else { -+#ifdef GRALLOC_MAPPER4 -+ bufferHandle_3 = native_handle_clone(*(destBuf.buffer)); -+ res = GrallocModule::getInstance().importBuffer(bufferHandle_3, &bufferHandle); -+ if (res!= OK) { -+ ALOGE("%s: Gralloc importBuffer failed",__FUNCTION__); -+ } -+ -+ res = GrallocModule::getInstance().lock(bufferHandle_3, -+#else - res = GrallocModule::getInstance().lock(*(destBuf.buffer), -+#endif -+ - #ifdef USE_GRALLOC1 - GRALLOC1_PRODUCER_USAGE_CPU_WRITE, - #else -@@ -1062,8 +1102,19 @@ status_t 
VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ - - sensorBuffers->push_back(destBuf); - buffers->push_back(srcBuf); -+#ifdef GRALLOC_MAPPER4 -+ if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) -+ { -+ GrallocModule::getInstance().unlock(bufferHandle2); -+ native_handle_close(bufferHandle2); -+ } -+ else -+ { -+ GrallocModule::getInstance().unlock(bufferHandle_3); -+ native_handle_close(bufferHandle_3); -+ } -+#endif - } -- - /** - * Wait for JPEG compressor to not be busy, if needed - */ -@@ -2670,7 +2721,7 @@ bool VirtualFakeCamera3::ReadoutThread::threadLoop() { - if (mJpegWaiting) { - // This shouldn't happen, because processCaptureRequest should - // be stalling until JPEG compressor is free. -- ALOGI("%s: Already processing a JPEG!", __FUNCTION__); -+ ALOGE("%s: Already processing a JPEG!", __FUNCTION__); - goodBuffer = false; - } - if (goodBuffer) { -@@ -2697,8 +2748,9 @@ bool VirtualFakeCamera3::ReadoutThread::threadLoop() { - res); - // fallthrough for cleanup - } -+#ifndef GRALLOC_MAPPER4 - GrallocModule::getInstance().unlock(*(buf->buffer)); -- -+#endif - buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; - buf->acquire_fence = -1; - buf->release_fence = -1; -@@ -2788,9 +2840,9 @@ bool VirtualFakeCamera3::ReadoutThread::threadLoop() { - - void VirtualFakeCamera3::ReadoutThread::onJpegDone(const StreamBuffer &jpegBuffer, bool success) { - Mutex::Autolock jl(mJpegLock); -- -+#ifndef GRALLOC_MAPPER4 - GrallocModule::getInstance().unlock(*(jpegBuffer.buffer)); -- -+#endif - mJpegHalBuffer.status = success ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; - mJpegHalBuffer.acquire_fence = -1; - mJpegHalBuffer.release_fence = -1; -diff --git a/src/fake-pipeline2/Sensor.cpp b/src/fake-pipeline2/Sensor.cpp -index 068af5c..0203f83 100644 ---- a/src/fake-pipeline2/Sensor.cpp -+++ b/src/fake-pipeline2/Sensor.cpp -@@ -25,7 +25,9 @@ - #endif - - #include "fake-pipeline2/Sensor.h" -+#ifdef ENABLE_FFMPEG - #include "CGCodec.h" -+#endif - #include - #include - #include -@@ -111,16 +113,22 @@ float sqrtf_approx(float r) { - - return *(float *)(&r_i); - } -- -+#ifdef ENABLE_FFMPEG - Sensor::Sensor(uint32_t width, uint32_t height, std::shared_ptr decoder) -+#else -+Sensor::Sensor(uint32_t width, uint32_t height) -+#endif - : Thread(false), - mResolution{width, height}, - mActiveArray{0, 0, width, height}, - mRowReadoutTime(kFrameDurationRange[0] / height), - mExposureTime(kFrameDurationRange[0] - kMinVerticalBlank), - mFrameDuration(kFrameDurationRange[0]), -- mScene(width, height, kElectronsPerLuxSecond), -- mDecoder{decoder} {} -+ mScene(width, height, kElectronsPerLuxSecond) -+#ifdef ENABLE_FFMPEG -+ ,mDecoder{decoder} -+#endif -+ {} - - Sensor::~Sensor() { shutDown(); } - -@@ -474,7 +482,7 @@ void Sensor::dump_yuv(uint8_t *img1, size_t img1_size, uint8_t *img2, size_t img - fwrite(img2, img2_size, 1, f); - fclose(f); - } -- -+#ifdef ENABLE_FFMPEG - bool Sensor::getNV12Frames(uint8_t *out_buf, int *out_size, - std::chrono::milliseconds timeout_ms /* default 5ms */) { - auto cg_video_frame = std::make_shared(); -@@ -514,7 +522,7 @@ bool Sensor::getNV12Frames(uint8_t *out_buf, int *out_size, - - return true; - } -- -+#endif - void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height) { - ALOGVV("%s: E", __FUNCTION__); - -@@ -539,7 +547,7 @@ void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - - // Initialize to the size based on resolution. 
- out_size = destPrevBufSize; -- -+#ifdef ENABLE_FFMPEG - if (gIsInFrameH264) { - if (handle->clientBuf[handle->clientRevCount % 1].decoded) { - // Note: bufData already assigned in the function start -@@ -556,7 +564,7 @@ void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - ulock.unlock(); - } - } -- -+#endif - int src_size = mSrcWidth * mSrcHeight; - int dstFrameSize = width * height; - -@@ -793,7 +801,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - - // Initialize to the size based on resolution. - out_size = mDstBufSize; -- -+#ifdef ENABLE_FFMPEG - if (gIsInFrameH264) { - if (handle->clientBuf[handle->clientRevCount % 1].decoded) { - // Note: bufData already assigned in the function start -@@ -809,7 +817,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - ulock.unlock(); - } - } -- -+#endif - // For default resolotion 640x480p - if (width == (uint32_t)mSrcWidth && height == (uint32_t)mSrcHeight) { - if (gIsInFrameI420) { -@@ -972,7 +980,6 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - #endif - ALOGI(LOG_TAG " %s: Captured NV12 Image sucessfully!!! ", __FUNCTION__); - } -- - void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height) { - ALOGVV("%s: E", __FUNCTION__); - -@@ -1002,6 +1009,7 @@ void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - //Initialize to the size based on resolution. - out_size = mDstJpegBufSize; - -+#ifdef ENABLE_FFMPEG - if (gIsInFrameH264) { - if (handle->clientBuf[handle->clientRevCount % 1].decoded) { - //Note: bufData already assigned in the function start -@@ -1017,6 +1025,7 @@ void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - ulock.unlock(); - } - } -+#endif - - //For default resolution 640x480p - if (width == (uint32_t)mSrcWidth && height == (uint32_t)mSrcHeight) { -@@ -1196,7 +1205,6 @@ void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h - } - ALOGVV("%s: Successfully Converted to NV21 for JPEG Capture!!!", __FUNCTION__); - } -- - void Sensor::captureDepth(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height) { - ALOGVV("%s", __FUNCTION__); - --- -2.17.1 -
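
The bulk of the patch removed here reworks CameraSocketServerThread::threadLoop() so that the camera frame transport is chosen from the ro.vendor.camera.transference property (UNIX, TCP, or VSOCK) and, on the VSOCK path, the server binds to CID 3, port 1982 with a listen backlog of 32. The standalone C++ sketch below isolates that VSOCK server setup; it is an illustration of the pattern under those assumptions, not the VHAL code itself, and it replaces the Android property and logging calls with plain standard-library equivalents.

// Minimal sketch of the VSOCK server path added by the removed patch.
// Assumptions: CID 3 and port 1982 as hard-coded in the removed hunk,
// blocking accept, no Android dependencies.
#include <cstdio>
#include <cstring>
#include <sys/socket.h>
#include <unistd.h>
#include <linux/vm_sockets.h>   // struct sockaddr_vm, AF_VSOCK

int main() {
    int server_fd = ::socket(AF_VSOCK, SOCK_STREAM, 0);
    if (server_fd < 0) { std::perror("socket(AF_VSOCK)"); return 1; }

    struct sockaddr_vm addr_vm;
    std::memset(&addr_vm, 0, sizeof(addr_vm));
    addr_vm.svm_family = AF_VSOCK;
    addr_vm.svm_port = 1982;   // port used by the removed code
    addr_vm.svm_cid = 3;       // CID assumed by the removed code

    if (::bind(server_fd, reinterpret_cast<struct sockaddr *>(&addr_vm), sizeof(addr_vm)) < 0) {
        std::perror("bind");
        return 1;
    }
    if (::listen(server_fd, 32) < 0) {   // backlog of 32, as in the removed code
        std::perror("listen");
        return 1;
    }

    // Block until the camera client connects over vsock.
    socklen_t alen = sizeof(addr_vm);
    int client_fd = ::accept(server_fd, reinterpret_cast<struct sockaddr *>(&addr_vm), &alen);
    if (client_fd < 0) { std::perror("accept"); return 1; }
    std::printf("camera client connected: fd %d\n", client_fd);

    ::close(client_fd);
    ::close(server_fd);
    return 0;
}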
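
On the VSOCK path the removed code also stops relying on recv() with MSG_WAITALL and instead accumulates reads until a complete 640x480 I420 frame has arrived (640 * 480 * 3 / 2 = 460800 bytes, the constant used in the removed hunk). A minimal helper in the same spirit, assuming a connected blocking stream socket, might look like the sketch below; the function name is illustrative, not part of the VHAL.

// Read until exactly one full 640x480 I420 frame has been received.
#include <sys/types.h>
#include <sys/socket.h>
#include <cstddef>
#include <cstdint>

constexpr size_t kI420FrameSize = 640 * 480 * 3 / 2;  // 460800 bytes

bool recvFullFrame(int fd, uint8_t *buf, size_t len = kI420FrameSize) {
    size_t received = 0;
    while (received < len) {
        ssize_t n = ::recv(fd, buf + received, len - received, 0);
        if (n <= 0) return false;  // peer closed the socket or an error occurred
        received += static_cast<size_t>(n);
    }
    return true;
}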