diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c3fd915e123129436c6c1d1effec986a0be54720 --- /dev/null +++ b/.gitignore @@ -0,0 +1,25 @@ +# IDE 配置目录 +.idea/ +.vscode/ + +# 构建系统和缓存目录 +.hvigor/ +entry/.cxx/ +entry/.preview/ +entry/build/ + +# 依赖模块目录 +entry/oh_modules/ +oh_modules/ + +# 日志文件 +idea-lsp-server.log +*.log + +# 操作系统生成的文件 +.DS_Store +Thumbs.db + +# 临时文件 +*.tmp +*.temp \ No newline at end of file diff --git a/entry/src/main/cpp/CMakeLists.txt b/entry/src/main/cpp/CMakeLists.txt index f4e536f25efd6979872794ae844eb3c90c26fd20..82713518e9f970b8b1e494e897252338da7eb22a 100644 --- a/entry/src/main/cpp/CMakeLists.txt +++ b/entry/src/main/cpp/CMakeLists.txt @@ -9,6 +9,8 @@ add_compile_definitions(-DEGL_EGLEXT_PROTOTYPES) # Add global compilation macro ONLY_TEST_ROI_INFO add_compile_definitions(-DONLY_TEST_ROI_INFO=1) +add_compile_definitions(-DDUMP_VIDEO_STREAM=0) + set(CMAKE_CXX_STANDARD_REQUIRED ON) include_directories(${NATIVERENDER_ROOT_PATH} ${NATIVERENDER_ROOT_PATH}/capbilities/codec/include @@ -43,7 +45,7 @@ add_library(recorder SHARED recorder/RecorderNative.cpp capbilities/render/egl_render_context.cpp capbilities/render/render_thread.cpp capbilities/render/shader_program.cpp -) + common/faceProcess/TimestampFaceMap.cpp) target_link_libraries(player PUBLIC ${BASE_LIBRARY}) target_link_libraries(recorder PUBLIC ${BASE_LIBRARY} player) \ No newline at end of file diff --git a/entry/src/main/cpp/capbilities/codec/VideoEncoder.cpp b/entry/src/main/cpp/capbilities/codec/VideoEncoder.cpp index 88e384350bc92406538a5725e8c88ba51a58dfd1..e2ff697c40005cdae5a1265035d765338539e39a 100644 --- a/entry/src/main/cpp/capbilities/codec/VideoEncoder.cpp +++ b/entry/src/main/cpp/capbilities/codec/VideoEncoder.cpp @@ -179,44 +179,23 @@ static void OnNeedInputParameter(OH_AVCodec *codec, uint32_t index, OH_AVFormat { // Retrieve CodecUserRoi instance from user data VideoEncoder::CodecUserRoi* roiUserData = 
static_cast(userData); - if (!roiUserData || !roiUserData->vencoder) { + if (roiUserData == nullptr || roiUserData->faceMap_ == nullptr) { SAMPLE_LOGE("Invalid user data in OnNeedInputParameter"); OH_VideoEncoder_PushInputParameter(codec, index); return; } - - VideoEncoder* encoder = roiUserData->vencoder; - FaceIntInfo faceInfo = encoder->GetLatestFaceInfo(); - std::string roiInfo; - - // If valid face information is available, use the face region as the ROI - if (faceInfo.valid) { - int32_t left = faceInfo.topLeftX; - int32_t top = faceInfo.topLeftY; - int32_t right = faceInfo.topLeftX + faceInfo.width; - int32_t bottom = faceInfo.topLeftY + faceInfo.height; - - // Construct ROI information string, lowering the QP value in facial regions to enhance image quality - char roiBuffer[100]; - int len = snprintf(roiBuffer, sizeof(roiBuffer), "%d,%d-%d,%d=-4", left, top, right, bottom); - if (len > 0 && len < static_cast(sizeof(roiBuffer))) { - roiInfo = std::string(roiBuffer, len); - } else { - SAMPLE_LOGE("Failed to format ROI string, buffer size: %zu", sizeof(roiBuffer)); - } -#if ONLY_TEST_ROI_INFO - SAMPLE_LOGI("HMOS_LiveStream: ROI face timestamp: %{public}d, ROI: %{public}s", - faceInfo.timestamp, roiBuffer); -#endif - } else { - // If no face information is available, do not set any ROI, allowing the encoder to use global default - // parameters - SAMPLE_LOGI("No valid face info, no ROI set."); + std::string faceInfo = + roiUserData->faceMap_->getFaceDataFromFIFO(5); // wait 5ms + if (faceInfo == "") { + SAMPLE_LOGE("getFaceDataFromFIFO failed"); + OH_VideoEncoder_PushInputParameter(codec, index); + return; } - + SAMPLE_LOGI("HMOS_LiveStream: TimestampFaceMap OnNeedInputParameter face ROI: %{public}s, fifoNumsLeft:%{public}llu, faceMapNumsLeft: %{public}llu", + faceInfo.c_str(), roiUserData->faceMap_->getQueueSize(), roiUserData->faceMap_->getMapSize()); // Set ROI parameters const char* roiKey = ApiCompat_OH_MD_KEY_VIDEO_ENCODER_ROI_PARAMS; - 
OH_AVFormat_SetStringValue(parameter, roiKey, roiInfo.c_str()); + OH_AVFormat_SetStringValue(parameter, roiKey, faceInfo.c_str()); OH_VideoEncoder_PushInputParameter(codec, index); } // [End ROI_encode] @@ -245,12 +224,18 @@ int32_t VideoEncoder::Configure(const SampleInfo &sampleInfo) height_ = sampleInfo.videoInfo.videoHeight; // Basic video parameter configuration - OH_AVFormat_SetIntValue(format, OH_MD_KEY_WIDTH, sampleInfo.videoInfo.videoWidth); - OH_AVFormat_SetIntValue(format, OH_MD_KEY_HEIGHT, sampleInfo.videoInfo.videoHeight); + OH_AVFormat_SetIntValue(format, OH_MD_KEY_WIDTH, + std::max(sampleInfo.videoInfo.videoWidth, sampleInfo.videoInfo.videoHeight)); + OH_AVFormat_SetIntValue(format, OH_MD_KEY_HEIGHT, + std::max(sampleInfo.videoInfo.videoWidth, sampleInfo.videoInfo.videoHeight)); OH_AVFormat_SetDoubleValue(format, OH_MD_KEY_FRAME_RATE, sampleInfo.videoInfo.frameRate); OH_AVFormat_SetIntValue(format, OH_MD_KEY_PIXEL_FORMAT, sampleInfo.videoInfo.pixelFormat); OH_AVFormat_SetIntValue(format, OH_MD_KEY_VIDEO_ENCODE_BITRATE_MODE, sampleInfo.videoInfo.bitrateMode); - OH_AVFormat_SetLongValue(format, OH_MD_KEY_BITRATE, sampleInfo.videoInfo.bitrate); + if (sampleInfo.videoInfo.bitrateMode == BITRATE_MODE_CQ) { + OH_AVFormat_SetIntValue(format, OH_MD_KEY_QUALITY, 50); + } else { + OH_AVFormat_SetLongValue(format, OH_MD_KEY_BITRATE, sampleInfo.videoInfo.bitrate); + } OH_AVFormat_SetIntValue(format, OH_MD_KEY_PROFILE, sampleInfo.videoInfo.hevcProfile); OH_AVFormat_SetIntValue(format, OH_MD_KEY_I_FRAME_INTERVAL, sampleInfo.videoInfo.iFrameInterval); OH_AVFormat_SetIntValue(format, OH_MD_KEY_RANGE_FLAG, sampleInfo.videoInfo.rangFlag); diff --git a/entry/src/main/cpp/capbilities/codec/include/CodecInfo.h b/entry/src/main/cpp/capbilities/codec/include/CodecInfo.h index 8b318de6733c8bdadae7917aff585d750e941213..ab87e051a56be5457302832908fb9b3cc403ded8 100644 --- a/entry/src/main/cpp/capbilities/codec/include/CodecInfo.h +++ 
b/entry/src/main/cpp/capbilities/codec/include/CodecInfo.h @@ -52,6 +52,7 @@ struct CodecBufferInfo { : bufferIndex(argBufferIndex), buffer(reinterpret_cast(argBuffer)) { OH_AVBuffer_GetBufferAttr(argBuffer, &attr); + bufferAddr = OH_AVBuffer_GetAddr(argBuffer); }; }; diff --git a/entry/src/main/cpp/capbilities/codec/include/VideoEncoder.h b/entry/src/main/cpp/capbilities/codec/include/VideoEncoder.h index e78a7825c61ee3a4514ff779220e5b140b4ec146..0a44ba731878a4f32a197cb270c5249c818eb5e0 100644 --- a/entry/src/main/cpp/capbilities/codec/include/VideoEncoder.h +++ b/entry/src/main/cpp/capbilities/codec/include/VideoEncoder.h @@ -29,19 +29,22 @@ #include "CodecCallback.h" #include "dfx/error/SampleError.h" #include "SampleLog.h" - +#include "faceProcess/TimestampFaceMap.h" class VideoEncoder { public: // User data structure for ROI parameter callback struct CodecUserRoi { VideoEncoder* vencoder = nullptr; // Pointer to the current encoder instance + std::shared_ptr faceMap_ = nullptr; }; // Constructor: Creates a CodecUserRoi object managed by a smart pointer - VideoEncoder() : userData_(std::make_unique()) + VideoEncoder(std::shared_ptr faceMap): + faceMap_(faceMap), userData_(std::make_unique()) { if (userData_) { userData_->vencoder = this; + userData_->faceMap_ = faceMap_; } } @@ -66,8 +69,8 @@ public: { return userData_.get(); } - private: + std::shared_ptr faceMap_ = nullptr; // Internal helper methods int32_t SetCallback(CodecUserData *codecUserData); int32_t Configure(const SampleInfo &sampleInfo); diff --git a/entry/src/main/cpp/capbilities/render/include/render_thread.h b/entry/src/main/cpp/capbilities/render/include/render_thread.h index 25da00125b93bdce28c5a9eed02b53c92578d6f7..0bea1014d27e98be9cc3e288e145024508429c0f 100644 --- a/entry/src/main/cpp/capbilities/render/include/render_thread.h +++ b/entry/src/main/cpp/capbilities/render/include/render_thread.h @@ -26,7 +26,7 @@ #include #include #include - +#include "faceProcess/TimestampFaceMap.h" #include 
"egl_render_context.h" #include "shader_program.h" @@ -35,7 +35,7 @@ using RenderTask = std::function; class RenderThread { public: - RenderThread(); + RenderThread(std::shared_ptr faceMap); ~RenderThread() noexcept; void UpdateNativeWindow(void *window, uint64_t width, uint64_t height); @@ -74,6 +74,9 @@ public: void SetCameraFront(bool isCameraFront); void UpdateCameraRotation(int rotation); private: + std::atomic isFirstFrame_{true}; + int64_t firstPts_{0}; + std::shared_ptr faceMap_ = nullptr; int xcomponentHeight_ = 0; int xcomponentWidth_ = 0; OHNativeWindow *xcomponentWindows_{nullptr}; @@ -144,8 +147,8 @@ private: GLuint outTexId_ = 9999U; std::atomic isCameraFront_ = false; std::atomic cameraRotation_{270}; - std::array drawCameraImageMatrix_; - std::array drawImageMatrix_; + std::array drawCameraImageMatrix_ __attribute__((aligned(16))); + std::array drawImageMatrix_ __attribute__((aligned(16))); OH_NativeBuffer *imageBuffer_ = nullptr; struct { uint32_t *data = nullptr; diff --git a/entry/src/main/cpp/capbilities/render/render_thread.cpp b/entry/src/main/cpp/capbilities/render/render_thread.cpp index e505f1fef3d34c9543356a578bd9c98117452b11..72dd00e21797f63e7f8fadd6c02ed26dd723e190 100644 --- a/entry/src/main/cpp/capbilities/render/render_thread.cpp +++ b/entry/src/main/cpp/capbilities/render/render_thread.cpp @@ -63,16 +63,17 @@ static void LeftProd(float* lmatrix, float* rmatrix, float* out) } std::string vertexShader = R"delimiter( -attribute vec3 position; -attribute vec2 texCoord; -varying vec2 vTexCoord; +layout(location = 0) in vec3 position; +layout(location = 1) in vec2 texCoord; + +out vec2 vTexCoord; + uniform mat4 matTransform; void main() { - gl_Position = vec4(position, 1.0); - vec4 rotatedUV = matTransform * vec4(texCoord, 0.0, 1.0); - vTexCoord = rotatedUV.xy; + gl_Position = matTransform * vec4(position, 1.0); + vTexCoord = texCoord; } )delimiter"; @@ -132,23 +133,29 @@ void main() )delimiter"; std::string frameFragmentShader = 
R"delimiter( -#extension GL_OES_EGL_image_external : require +#extension GL_OES_EGL_image_external_essl3 : require precision highp float; -varying vec2 vTexCoord; +precision mediump int; +in highp vec2 vTexCoord; +uniform float width; +uniform float height; + +uniform samplerExternalOES tex; +out highp vec4 outColor; -uniform samplerExternalOES texture; void main() { - gl_FragColor = texture2D(texture, vTexCoord); + highp vec4 color = texture(tex, vTexCoord); + outColor = color; } )delimiter"; GLfloat vertices[] = { // positions // texture coords - -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, // top left - -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, // bottom left - 1.0f, -1.0f, 0.0f, 1.0f, 1.0f, // bottom right - 1.0f, 1.0f, 0.0f, 1.0f, 0.0f // top right + -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, // top left + -1.0f, -1.0f, 0.0f, 1.0f, 0.0f, // bottom left + 1.0f, -1.0f, 0.0f, 0.0f, 0.0f, // bottom right + 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top right }; GLuint indices[] = { @@ -157,8 +164,8 @@ GLuint indices[] = { }; } // namespace Detail -RenderThread::RenderThread() -{ +RenderThread::RenderThread(std::shared_ptr faceMap) + : faceMap_(faceMap) { Start(); } @@ -191,14 +198,13 @@ void RenderThread::CleanGLResources() imageShader_.reset(); } -bool RenderThread::CreateGLResources() -{ +bool RenderThread::CreateGLResources() { videoShader_ = std::make_unique(VERSION_GLSL + Detail::vertexShader, VERSION_GLSL + Detail::fragmentShader); imageShader_ = std::make_unique( VERSION_GLSL + Detail::vertexShader, VERSION_GLSL + TEXTURE_2D_SRC + Detail::fragmentShader); frameShader_ = std::make_unique( - Detail::vertexShader, Detail::frameFragmentShader); + VERSION310_GLSL + Detail::vertexShader, VERSION310_GLSL + Detail::frameFragmentShader); if (!videoShader_->Valid() || !imageShader_->Valid()) { return false; @@ -369,7 +375,7 @@ void RenderThread::Start() if (running_) { return; } - + isFirstFrame_.store(true); running_ = true; thread_ = std::thread([this]() { ThreadMainLoop(); @@ -660,6 +666,40 @@ void 
RenderThread::ImageDraw(OHNativeWindowBuffer *InBuffer, OHNativeWindowBuffe renderContext_->DeleteEGLImage(imgOut); } +#ifdef __ARM_NEON +#include +#endif + +void matrix_multiply_neon(const float* a, float* b, float* c) +{ +#if defined(__ARM_NEON) || defined(__ARM_NEON__) + // Use NEON acceleration + float32x4_t b0 = vld1q_f32(b); + float32x4_t b1 = vld1q_f32(b + 4); + float32x4_t b2 = vld1q_f32(b + 8); + float32x4_t b3 = vld1q_f32(b + 12); + + for (int i = 0; i < 4; i++) { + float32x4_t a_row = vld1q_f32(a + i * 4); + float32x4_t c_row = vmulq_laneq_f32(b0, a_row, 0); + c_row = vfmaq_laneq_f32(c_row, b1, a_row, 1); + c_row = vfmaq_laneq_f32(c_row, b2, a_row, 2); + c_row = vfmaq_laneq_f32(c_row, b3, a_row, 3); + vst1q_f32(c + i * 4, c_row); + } +#else + // Fall back to standard C implementation + for (int i = 0; i < 4; i++) { + for (int j = 0; j < 4; j++) { + c[i*4+j] = 0; + for (int k = 0; k < 4; k++) { + c[i*4+j] += a[i*4+k] * b[k*4+j]; + } + } + } +#endif +} + void RenderThread::DrawImage() { OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_PRINT_DOMAIN, "RenderThread", "DrawImage."); @@ -680,12 +720,34 @@ void RenderThread::DrawImage() int64_t pts = OH_NativeImage_GetTimestamp(nativeImage_); OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_PRINT_DOMAIN, "RenderThread", - "HMOS_LiveStream: ROI OH_NativeImage_GeTimestamp pts %{public}lld", pts); - - if (OH_NativeImage_GetTransformMatrixV2(nativeImage_, drawCameraImageMatrix_.data())) { - OH_LOG_Print(LOG_APP, LOG_WARN, LOG_PRINT_DOMAIN, - "RenderThread", "OH_NativeImage_GetTransformMatrix failed!"); - return; + "HMOS_LiveStream: ROI OH_NativeImage_GetTimestamp pts %{public}lld", pts); + float mirrorNativeImageMatrix[16] __attribute__((aligned(16))); + if (isCameraFront_) { + float nativeImageMatrix[16] __attribute__((aligned(16))); + const float mirrorMatrix[16] __attribute__((aligned(16))) { + -1, 0, 0, 0, + 0, -1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + }; + if (OH_NativeImage_GetTransformMatrixV2(nativeImage_, nativeImageMatrix)) { + 
OH_LOG_Print(LOG_APP, LOG_WARN, LOG_PRINT_DOMAIN, + "RenderThread", "OH_NativeImage_GetTransformMatrix failed!"); + return; + } + nativeImageMatrix[12] = 0.0f; + nativeImageMatrix[13] = 0.0f; + nativeImageMatrix[14] = 0.0f; + matrix_multiply_neon(mirrorMatrix, nativeImageMatrix,mirrorNativeImageMatrix); + } else { + if (OH_NativeImage_GetTransformMatrixV2(nativeImage_, mirrorNativeImageMatrix)) { + OH_LOG_Print(LOG_APP, LOG_WARN, LOG_PRINT_DOMAIN, + "RenderThread", "OH_NativeImage_GetTransformMatrix failed!"); + return; + } + mirrorNativeImageMatrix[12] = 0.0f; + mirrorNativeImageMatrix[13] = 0.0f; + mirrorNativeImageMatrix[14] = 0.0f; } ret = OH_NativeWindow_NativeObjectReference(InBuffer); @@ -733,6 +795,11 @@ void RenderThread::DrawImage() } while (retCode == -1 && (errno == EINTR || errno == EAGAIN)); close(fenceFd3); // Prevent file descriptor leaks } + if (isFirstFrame_) { + firstPts_ = pts; + isFirstFrame_ = false; + } + OH_NativeWindow_NativeWindowHandleOpt(encoderNativeWindow_, SET_UI_TIMESTAMP, (pts - firstPts_) / 1000); int viewWidth = 0; int viewHeight = 0; OH_NativeWindow_NativeWindowHandleOpt(encoderNativeWindow_, GET_BUFFER_GEOMETRY, &viewHeight, &viewWidth); @@ -745,14 +812,65 @@ void RenderThread::DrawImage() OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_PRINT_DOMAIN, "RenderThread", "OH_NativeWindow_NativeWindowHandleOpt get Xcomponent size failed!"); } - OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_PRINT_DOMAIN, "RenderThread", - "xcomponentHeight_:%{public}d, xcomponentWidth_:%{public}d", xcomponentHeight_, xcomponentWidth_); int imageRotation = cameraRotation_; + OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_PRINT_DOMAIN, "RenderThread", + "FHMFHM imageRotation:%{public}d, imageSize::%{public}dx%{public}d xcomponent:%{public}dx%{public}d", + imageRotation, viewWidth, viewHeight, xcomponentWidth_, xcomponentHeight_); + std::array rotationNativeImageMatrix __attribute__((aligned(16))); + switch (imageRotation) { + case 0: + rotationNativeImageMatrix = { + 0, -1, 0, 0, + 
1, 0, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1}; + break; + case 180: + rotationNativeImageMatrix = { + 0, 1, 0, 0, + -1, 0, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1}; + break; + case 90: + rotationNativeImageMatrix = { + 1, 0, 0, 0, + 0, -1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1}; + break; + default: + rotationNativeImageMatrix = { + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1}; + } if (imageRotation == 0 || imageRotation == 180) { + //(void)OH_NativeWindow_NativeWindowHandleOpt(encoderNativeWindow_, SET_BUFFER_GEOMETRY, + // static_cast(viewHeight), static_cast(viewWidth)); + if (faceMap_) { + faceMap_->queryAndPushToFIFO(pts, viewHeight, viewWidth); + } + std::copy(std::begin(mirrorNativeImageMatrix), + std::end(mirrorNativeImageMatrix), + drawCameraImageMatrix_.begin()); ImageDraw(InBuffer, OutBuffer, viewHeight, viewWidth, xcomponentWidth_, xcomponentHeight_); - ImageDraw(InBuffer, OutBufferEncoder, viewHeight, viewWidth, viewWidth, viewHeight); + matrix_multiply_neon(mirrorNativeImageMatrix, + rotationNativeImageMatrix.data(),drawCameraImageMatrix_.data()); + ImageDraw(InBuffer, OutBufferEncoder, viewWidth, viewHeight, viewWidth, viewHeight); } else { + //(void)OH_NativeWindow_NativeWindowHandleOpt(encoderNativeWindow_, SET_BUFFER_GEOMETRY, + // static_cast(viewWidth), static_cast(viewHeight)); + if (faceMap_) { + faceMap_->queryAndPushToFIFO(pts, viewWidth, viewHeight); + } + std::copy(std::begin(mirrorNativeImageMatrix), + std::end(mirrorNativeImageMatrix), + drawCameraImageMatrix_.begin()); ImageDraw(InBuffer, OutBuffer, viewWidth, viewHeight, xcomponentWidth_, xcomponentHeight_); + matrix_multiply_neon(mirrorNativeImageMatrix, + rotationNativeImageMatrix.data(),drawCameraImageMatrix_.data()); ImageDraw(InBuffer, OutBufferEncoder, viewWidth, viewHeight, viewWidth, viewHeight); } diff --git a/entry/src/main/cpp/common/SampleInfo.h b/entry/src/main/cpp/common/SampleInfo.h index 218e741b8dce36f8ed8d05c6a5040b04f0005aa5..6f980dcc1dc7151cfe41d73689282cd88b613fc6 100644 --- 
a/entry/src/main/cpp/common/SampleInfo.h +++ b/entry/src/main/cpp/common/SampleInfo.h @@ -44,7 +44,11 @@ struct VideoInfo { int64_t bitrate = 3 * 1024 * 1024; // 3Mbps; int64_t frameInterval = 0; OH_AVPixelFormat pixelFormat = AV_PIXEL_FORMAT_NV12; - uint32_t bitrateMode = VBR; +#if DUMP_VIDEO_STREAM + uint32_t bitrateMode = BITRATE_MODE_CQ; +#else + uint32_t bitrateMode = BITRATE_MODE_VBR; +#endif int32_t iFrameInterval = 2000; int32_t rangFlag = 1; int32_t isHDRVivid = 0; diff --git a/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.cpp b/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.cpp new file mode 100644 index 0000000000000000000000000000000000000000..84e04d25fb2955ae4dc90ef25a867235368b586e --- /dev/null +++ b/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.cpp @@ -0,0 +1,208 @@ +#include "TimestampFaceMap.h" +#include +#include +#include +#include +#include "TimestampFaceMap.h" + +#ifndef LOG_TAG_FACE +#define LOG_TAG_FACE "TimestampFaceMap" +#endif +constexpr uint32_t LOG_PRINT_DOMAIN = 0xFF00; +// 日志级别定义 +#define SAMPLE_LOGD(fmt, ...) \ + OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_PRINT_DOMAIN, LOG_TAG_FACE, fmt, ##__VA_ARGS__) + +#define SAMPLE_LOGI(fmt, ...) \ + OH_LOG_Print(LOG_APP, LOG_INFO, LOG_PRINT_DOMAIN, LOG_TAG_FACE, fmt, ##__VA_ARGS__) + +#define SAMPLE_LOGW(fmt, ...) \ + OH_LOG_Print(LOG_APP, LOG_WARN, LOG_PRINT_DOMAIN, LOG_TAG_FACE, fmt, ##__VA_ARGS__) + +#define SAMPLE_LOGE(fmt, ...) 
\ + OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_PRINT_DOMAIN, LOG_TAG_FACE, fmt, ##__VA_ARGS__) + +// 默认构造函数 +TimestampFaceMap::TimestampFaceMap() + : stopFlag_(false), + lastTimestamp_(0), + timestampRollover_(false), + maxMapSize_(1000), + cleanupThreshold_(800) { + SAMPLE_LOGI("TimestampFaceMap initialized with default settings"); + SAMPLE_LOGI("Max map size: %{public}zu, Cleanup threshold: %{public}zu", + maxMapSize_, cleanupThreshold_); +} + +// 带参数的构造函数 +TimestampFaceMap::TimestampFaceMap(size_t maxMapSize) + : stopFlag_(false), + lastTimestamp_(0), + timestampRollover_(false), + maxMapSize_(maxMapSize), + cleanupThreshold_(maxMapSize * 0.8) { + SAMPLE_LOGI("TimestampFaceMap initialized with custom settings"); + SAMPLE_LOGI("Max map size: %{public}zu, Cleanup threshold: %{public}zu", + maxMapSize_, cleanupThreshold_); +} + +TimestampFaceMap::~TimestampFaceMap() { + stop(); + SAMPLE_LOGD("TimestampFaceMap destroyed"); +} + +void TimestampFaceMap::start() { + stopFlag_ = false; + SAMPLE_LOGI("TimestampFaceMap started"); +} + +void TimestampFaceMap::addFaceData(int64_t timestamp, const std::vector& faceList) { + if (isStopped()) return; + + std::lock_guard lock(mapMutex_); + + // 处理时间戳翻转 + handleTimestampRollover(timestamp); + + // 插入新数据 + faceMap_[timestamp] = faceList; + SAMPLE_LOGI("TimestampFaceMap->addFaceData ROI timestamp %{public}lld, faceNums %{public}d, mapSize %{public}llu", + timestamp, faceList.size(), faceMap_.size()); + // 如果超过阈值,清理旧数据 + if (faceMap_.size() > cleanupThreshold_) { + cleanupOldData(); + } +} + +std::string TimestampFaceMap::serializeFaceData( + const std::vector& faceList, int width, int height) { + std::ostringstream oss; + for (size_t i = 0; i < faceList.size(); ++i) { + const auto& face = faceList[i]; + int32_t left = static_cast(face.topLeftX * width); + int32_t top = static_cast(face.topLeftY * height); + int32_t right = static_cast(face.width * width) + left; + int32_t bottom = static_cast(face.height * height) + top; + oss << top << 
"," << left << "-" << bottom << "," << right << "=-4"; + if (i < faceList.size() - 1) { + oss << ";"; + } + } + return oss.str(); +} + +void TimestampFaceMap::queryAndPushToFIFO(int64_t queryTimestamp, int width, int height) { + if (isStopped()) return; + std::vector faceList; + { + std::lock_guard lock(mapMutex_); + // 处理时间戳翻转 + handleTimestampRollover(queryTimestamp); + // 查找数据 + auto it = faceMap_.find(queryTimestamp); + if (it != faceMap_.end()) { + faceList = it->second; + // 清理查询时间戳之前的所有旧数据 + cleanupDataBefore(queryTimestamp); + } + } + // 将面部数据序列化为字符串并推送到FIFO队列 + if (!faceList.empty()) { + std::string faceStr = serializeFaceData(faceList, width, height); + SAMPLE_LOGI("TimestampFaceMap->queryAndPushToFIFO found ROI queryTimestamp %{public}lld faceStr %{public}s", + queryTimestamp, faceStr.c_str()); + std::lock_guard lock(queueMutex_); + fifoQueue_.push(std::move(faceStr)); + queueCV_.notify_one(); // 通知消费者 + } else { + SAMPLE_LOGD("TimestampFaceMap->queryAndPushToFIFO not found ROI queryTimestamp %{public}lld", + queryTimestamp); + } +} + +std::string TimestampFaceMap::getFaceDataFromFIFO(int timeoutMs) { + std::unique_lock lock(queueMutex_); + if (queueCV_.wait_for(lock, std::chrono::milliseconds(timeoutMs), + [this]() { return !fifoQueue_.empty() || stopFlag_; })) { + if (!fifoQueue_.empty()) { + auto data = std::move(fifoQueue_.front()); + fifoQueue_.pop(); + return data; + } + } + return ""; // 超时或停止,返回空字符串 +} + +void TimestampFaceMap::stop() { + if (!stopFlag_) { + stopFlag_ = true; + queueCV_.notify_all(); + SAMPLE_LOGI("TimestampFaceMap stopped"); + } +} + +size_t TimestampFaceMap::getQueueSize() { + std::lock_guard lock(queueMutex_); + return fifoQueue_.size(); +} + +size_t TimestampFaceMap::getMapSize() { + std::lock_guard lock(mapMutex_); + return faceMap_.size(); +} + +bool TimestampFaceMap::isStopped() const { + return stopFlag_; +} + +void TimestampFaceMap::setMaxMapSize(size_t size) { + maxMapSize_ = size; + cleanupThreshold_ = size * 0.8; // 
自动调整清理阈值 +} + +void TimestampFaceMap::setCleanupThreshold(size_t threshold) { + cleanupThreshold_ = threshold; +} + +void TimestampFaceMap::handleTimestampRollover(int64_t currentTimestamp) { + if (currentTimestamp < lastTimestamp_) { + // 检测到时间戳翻转 + timestampRollover_ = true; + SAMPLE_LOGW("Timestamp rollover detected: %{public}lld -> %{public}lld", + lastTimestamp_, currentTimestamp); + } + lastTimestamp_ = currentTimestamp; +} + +void TimestampFaceMap::cleanupOldData() { + if (faceMap_.size() > maxMapSize_) { + size_t removeCount = faceMap_.size() - maxMapSize_; + auto it = faceMap_.begin(); + std::advance(it, removeCount); + faceMap_.erase(faceMap_.begin(), it); + SAMPLE_LOGI("Cleaned up %{public}zu old entries, current map size: %{public}zu", + removeCount, faceMap_.size()); + } +} + +void TimestampFaceMap::cleanupDataBefore(int64_t timestamp) { + if (timestampRollover_) { + // 时间戳翻转后的特殊处理 + auto it = faceMap_.begin(); + while (it != faceMap_.end()) { + // 如果时间戳比当前查询时间戳小很多,则清理 + if (it->first < timestamp - 1000) { + it = faceMap_.erase(it); + } else { + ++it; + } + } + timestampRollover_ = false; // 重置翻转标志 + } else { + // 正常情况下的清理:清理所有小于等于当前时间戳的数据(包括当前时间戳) + auto it = faceMap_.upper_bound(timestamp); // 使用 upper_bound 而不是 lower_bound + if (it != faceMap_.begin()) { + faceMap_.erase(faceMap_.begin(), it); + } + } +} \ No newline at end of file diff --git a/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.h b/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.h new file mode 100644 index 0000000000000000000000000000000000000000..7330e735c812ce697f4212126c33f58ee2b44383 --- /dev/null +++ b/entry/src/main/cpp/common/faceProcess/TimestampFaceMap.h @@ -0,0 +1,77 @@ +#ifndef TIMESTAMP_FACE_MAP_H +#define TIMESTAMP_FACE_MAP_H + +#include +#include +#include +#include +#include +#include +#include + +struct SimpleFaceInfo { + double topLeftX; + double topLeftY; + double width; + double height; +}; + +struct FaceDataWithRotation { + std::vector faceList; + int 
rotation; +}; + +class TimestampFaceMap { +public: + TimestampFaceMap(); + TimestampFaceMap(size_t maxMapSize); // 带参数的构造函数 + ~TimestampFaceMap(); + + // 禁止拷贝和赋值 + TimestampFaceMap(const TimestampFaceMap&) = delete; + TimestampFaceMap& operator=(const TimestampFaceMap&) = delete; + + // 生产者线程接口 + void addFaceData(int64_t timestamp, const std::vector& faceList); + void queryAndPushToFIFO(int64_t queryTimestamp, int width, int height); + + // 消费者线程接口 + std::string getFaceDataFromFIFO(int timeoutMs = 10); + + // 控制接口 + void stop(); + void start(); + + // 状态查询 + size_t getQueueSize(); + size_t getMapSize(); + bool isStopped() const; + + // 配置接口 + void setMaxMapSize(size_t size); + void setCleanupThreshold(size_t threshold); + +private: + // 内部处理方法 + void handleTimestampRollover(int64_t currentTimestamp); + void cleanupOldData(); + void cleanupDataBefore(int64_t timestamp); + std::string serializeFaceData(const std::vector& faceList, int width, int height); + // 成员变量 + std::map> faceMap_; + std::queue fifoQueue_; + std::mutex mapMutex_; + std::mutex queueMutex_; + std::condition_variable queueCV_; + std::atomic stopFlag_; + + // 时间戳翻转处理 + int64_t lastTimestamp_; + bool timestampRollover_; + + // 配置参数 + size_t maxMapSize_; + size_t cleanupThreshold_; +}; + +#endif // TIMESTAMP_FACE_MAP_H \ No newline at end of file diff --git a/entry/src/main/cpp/recorder/Recorder.cpp b/entry/src/main/cpp/recorder/Recorder.cpp index cb1b69651a2de7e6c318ca9bf7b6284a39179893..fd9e5881efb2f3845c40f26f832aff4201da9e86 100644 --- a/entry/src/main/cpp/recorder/Recorder.cpp +++ b/entry/src/main/cpp/recorder/Recorder.cpp @@ -43,16 +43,17 @@ int32_t Recorder::Init(SampleInfo &sampleInfo) sampleInfo_ = sampleInfo; sampleInfo_.videoInfo.videoWidth = sampleInfo.videoInfo.videoHeight; sampleInfo_.videoInfo.videoHeight = sampleInfo.videoInfo.videoWidth; + faceMap_->start(); if (!sampleInfo_.videoInfo.isHDRVivid) { // Create Render Thread - renderThread_ = std::make_unique(); + renderThread_ = 
std::make_unique(faceMap_); CHECK_AND_RETURN_RET_LOG(renderThread_ != nullptr, SAMPLE_ERR_ERROR, "renderThread_ is nullptr."); } // Audio Capturer Init audioEncoder_ = std::make_unique(); audioCapturer_ = std::make_unique(); - videoEncoder_ = std::make_unique(); + videoEncoder_ = std::make_unique(faceMap_); muxer_ = std::make_unique(); int32_t ret = muxer_->Create(sampleInfo_.fileInfo.outputFd); @@ -92,12 +93,11 @@ void Recorder::UpdateInfoForCamera(bool isFront) } } -void Recorder::UpdateFaceInfoFromCamera(FaceInfo faceInfo) +void Recorder::UpdateFaceInfoFromCamera(int64_t timestamp, std::vector faceList) { - OH_LOG_Print(LOG_APP, LOG_DEBUG, 0xFF00, - "UpdateFaceInfoFromCamera", "face %{public}f.", faceInfo.topLeftX); - if (videoEncoder_) { - videoEncoder_->UpdateFaceInfoFromCamera(faceInfo); + if (faceMap_) { + SAMPLE_LOGI("Recorder::UpdateFaceInfoFromCamera ROI start, faceNums %{public}llu.", faceList.size()); + faceMap_->addFaceData(timestamp, faceList); } } @@ -165,6 +165,52 @@ int32_t Recorder::Start(std::string previewSurfaceId) return SAMPLE_ERR_OK; } +#if DUMP_VIDEO_STREAM +void Recorder::WriteVideoBufferToFile(const CodecBufferInfo& bufferInfo) +{ + //OH_LOG_Print(LOG_APP, LOG_DEBUG, 0xFFF, + // "WriteVideoBufferToFile", "FHMFHMFHM %{public}d", __LINE__); + static std::once_flag fileInitFlag; + std::call_once(fileInitFlag, [this]() { + // 生成文件名:当前时间格式为 YYYYMMDD_HHMMSS + auto now = std::chrono::system_clock::now(); + auto time_t = std::chrono::system_clock::to_time_t(now); + std::tm tm = *std::localtime(&time_t); + + char filename[100]; + std::strftime(filename, sizeof(filename), "/data/storage/el2/base/haps/entry/files/%Y%m%d_%H%M%S_recorder.h265", &tm); + videoFileName_ = filename; + + videoFile_.open(videoFileName_, std::ios::binary | std::ios::out | std::ios::app); + if (videoFile_.is_open()) { + OH_LOG_Print(LOG_APP, LOG_WARN, 0xFFF, + "WriteVideoBufferToFile", "Successfully created video data file: %{public}s", videoFileName_.c_str()); + } else { + 
OH_LOG_Print(LOG_APP, LOG_ERROR, 0xFFF, + "WriteVideoBufferToFile", "Failed to create video data file: %{public}s", videoFileName_.c_str()); + } + }); + + // 写入数据到文件 + if (videoFile_.is_open() && bufferInfo.attr.size > 0 && bufferInfo.bufferAddr) { + // 假设 bufferInfo.buffer 是原始数据指针,根据实际情况可能需要调整 + videoFile_.write(reinterpret_cast(bufferInfo.bufferAddr), bufferInfo.attr.size); + OH_LOG_Print(LOG_APP, LOG_DEBUG, 0xFFF, + "WriteVideoBufferToFile", "fwrite size:%{public}llu 0x%{public}x-%{public}x-%{public}x-%{public}x, file: %{public}s", + bufferInfo.attr.size, + bufferInfo.bufferAddr[0], + bufferInfo.bufferAddr[1], + bufferInfo.bufferAddr[2], + bufferInfo.bufferAddr[3], + videoFileName_.c_str()); + if (!videoFile_.good()) { + OH_LOG_Print(LOG_APP, LOG_ERROR, 0xFFF, + "WriteVideoBufferToFile", "Failed to write video data to file: %{public}s", videoFileName_.c_str()); + } + } +} +#endif + void Recorder::VideoEncOutputThread() { while (true) { @@ -182,26 +228,35 @@ void Recorder::VideoEncOutputThread() lock.unlock(); // codec_data is not considered a frame. 
if ((bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_CODEC_DATA) && isFirstCodecData_) { - bufferInfo.attr.pts = 0; + // bufferInfo.attr.pts = 0; isFirstCodecData_.store(false); } if ((bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_SYNC_FRAME) || (bufferInfo.attr.flags == AVCODEC_BUFFER_FLAGS_NONE)) { - if (!isFirstSyncFrame_) { - bufferInfo.attr.pts = encContext_->outputFrameCount * MICROSECOND / sampleInfo_.videoInfo.frameRate; - } + // if (!isFirstSyncFrame_) { + // bufferInfo.attr.pts = encContext_->outputFrameCount * MICROSECOND / sampleInfo_.videoInfo.frameRate; + // } encContext_->outputFrameCount++; isFirstSyncFrame_.store(false); } - SAMPLE_LOGW("Video encoder buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64, + SAMPLE_LOGD("Video encoder buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64, encContext_->outputFrameCount, bufferInfo.attr.size, bufferInfo.attr.flags, bufferInfo.attr.pts); - +#if DUMP_VIDEO_STREAM + WriteVideoBufferToFile(bufferInfo); +#endif muxer_->WriteSample(muxer_->GetVideoTrackId(), reinterpret_cast(bufferInfo.buffer), bufferInfo.attr); int32_t ret = videoEncoder_->FreeOutputBuffer(bufferInfo.bufferIndex); CHECK_AND_BREAK_LOG(ret == SAMPLE_ERR_OK, "Encoder output thread out"); } +#if DUMP_VIDEO_STREAM + if (videoFile_.is_open()) { + videoFile_.close(); + OH_LOG_Print(LOG_APP, LOG_WARN, 0xFFF, + "WriteVideoBufferToFile", "Video data file closed: %{public}s", videoFileName_.c_str()); + } +#endif SAMPLE_LOGI("Exit, frame count: %{public}u", encContext_->outputFrameCount); StartRelease(); } diff --git a/entry/src/main/cpp/recorder/RecorderNative.cpp b/entry/src/main/cpp/recorder/RecorderNative.cpp index 9b7a295560a921b3f783ebec8e786c462088547a..c77aa36dc4c2a4f60d9f2185b107e91d94e3e116 100644 --- a/entry/src/main/cpp/recorder/RecorderNative.cpp +++ b/entry/src/main/cpp/recorder/RecorderNative.cpp @@ -300,22 +300,49 @@ napi_value RecorderNative::Stop(napi_env env, 
napi_callback_info info) napi_value RecorderNative::UpdateFaceInfoFromCamera(napi_env env, napi_callback_info info) { - FaceInfo faceInfo; - size_t argc = 7; - napi_value args[7] = {nullptr}; + size_t argc = 4; + napi_value args[4] = {nullptr}; napi_get_cb_info(env, info, &argc, args, nullptr, nullptr); + if (argc < 4) { + napi_throw_error(env, nullptr, "Wrong number of arguments for UpdateFaceInfoFromCamera"); + return nullptr; + } int64_t addrValue = 0; bool flag = false; napi_get_value_bigint_int64(env, args[0], &addrValue, &flag); Recorder *recorder = reinterpret_cast<Recorder *>(addrValue); - napi_get_value_bool(env, args[1], &faceInfo.valid); - napi_get_value_int64(env, args[2], &faceInfo.timestamp); - napi_get_value_double(env, args[3], &faceInfo.topLeftX); - napi_get_value_double(env, args[4], &faceInfo.topLeftY); - napi_get_value_double(env, args[5], &faceInfo.width); - napi_get_value_double(env, args[6], &faceInfo.height); - if (recorder) { - recorder->UpdateFaceInfoFromCamera(faceInfo); + bool faceValid = false; + napi_get_value_bool(env, args[1], &faceValid); + int64_t timestamp = 0; + napi_get_value_int64(env, args[2], &timestamp); + + napi_value faceListArray = args[3]; + bool isArray; + napi_is_array(env, faceListArray, &isArray); + if (!isArray) { + napi_throw_error(env, nullptr, "Fourth argument must be an array!"); + return nullptr; + } + uint32_t arrayLength; + napi_get_array_length(env, faceListArray, &arrayLength); + std::vector<SimpleFaceInfo> faceListCpp; + for (uint32_t i = 0; i < arrayLength; i++) { + SimpleFaceInfo faceInfoItem = {0}; + napi_value faceInfoObject; + napi_get_element(env, faceListArray, i, &faceInfoObject); + napi_value propValue; + napi_get_named_property(env, faceInfoObject, "topLeftX", &propValue); + napi_get_value_double(env, propValue, &faceInfoItem.topLeftX); + napi_get_named_property(env, faceInfoObject, "topLeftY", &propValue); + napi_get_value_double(env, propValue, &faceInfoItem.topLeftY); + napi_get_named_property(env, faceInfoObject, "width", 
&propValue); + napi_get_value_double(env, propValue, &faceInfoItem.width); + napi_get_named_property(env, faceInfoObject, "height", &propValue); + napi_get_value_double(env, propValue, &faceInfoItem.height); + faceListCpp.push_back(faceInfoItem); + } + if (recorder && faceValid) { + recorder->UpdateFaceInfoFromCamera(timestamp, faceListCpp); } return nullptr; } diff --git a/entry/src/main/cpp/recorder/include/Recorder.h b/entry/src/main/cpp/recorder/include/Recorder.h index 2cd0064d347c31ae9fca9ebb51ec119da7a19639..503ae396fc160ec36fb528b7d8123cea351ea78d 100644 --- a/entry/src/main/cpp/recorder/include/Recorder.h +++ b/entry/src/main/cpp/recorder/include/Recorder.h @@ -15,7 +15,7 @@ #ifndef RECODER_H #define RECODER_H - +#include "faceProcess/TimestampFaceMap.h" #include "AudioEncoder.h" #include "Muxer.h" #include "SampleInfo.h" @@ -24,7 +24,12 @@ #include #include #include - +#if DUMP_VIDEO_STREAM +#include +#include +#include +#include +#endif #include #include @@ -33,9 +38,13 @@ #include "../../capbilities/codec/include/AudioCapturer.h" #include "../../capbilities/render/include/render_thread.h" + + class Recorder { public: - Recorder(){}; + Recorder(){ + faceMap_ = std::make_shared<TimestampFaceMap>(500); + }; ~Recorder(); int32_t Init(SampleInfo &sampleInfo); @@ -45,8 +54,9 @@ public: void UpdateInfoForCamera(bool isFront); void UpdateCameraRotation(int rotation); - void UpdateFaceInfoFromCamera(FaceInfo faceInfo); + void UpdateFaceInfoFromCamera(int64_t timestamp, std::vector<SimpleFaceInfo> faceListCpp); private: + std::shared_ptr<TimestampFaceMap> faceMap_ = nullptr; void VideoEncOutputThread(); void AudioEncInputThread(); void AudioEncOutputThread(); @@ -78,6 +88,11 @@ private: std::unique_ptr audioCapturer_ = nullptr; std::unique_ptr renderThread_ = nullptr; +#if DUMP_VIDEO_STREAM + std::ofstream videoFile_; // 视频数据文件流 + std::string videoFileName_; // 视频文件名 + void WriteVideoBufferToFile(const CodecBufferInfo& bufferInfo); +#endif }; #endif // RECODER_H \ No newline at end of file diff --git 
a/entry/src/main/cpp/types/librecorder/index.d.ts b/entry/src/main/cpp/types/librecorder/index.d.ts index 550ee22db02b3a244f5f0a5679940a67350c61cf..b95b78f8ccd8991ae0e4de47faa9dccb8df0a00f 100644 --- a/entry/src/main/cpp/types/librecorder/index.d.ts +++ b/entry/src/main/cpp/types/librecorder/index.d.ts @@ -13,6 +13,13 @@ * limitations under the License. */ +interface FaceBoundingBox { + topLeftX: number; + topLeftY: number; + width: number; + height: number; +} + export const createRecorder: () => bigint; export const releaseRecorder: (objAddr: bigint) => void; @@ -31,8 +38,11 @@ export const UpdateCameraRotation: (objAddr: bigint, rotation: number) => void export const stopNative: (objAddr: bigint) => Promise export const UpdateFaceInfoFromCamera: ( - objAddr: bigint, valid: boolean, timestamp: number, topLeftX: number, topLeftY: number, - width: number, height: number) => void + objAddr: bigint, + valid: boolean, + timestamp: number, + faceList: Array +) => void export class Response { code: number diff --git a/entry/src/main/ets/controller/CameraController.ets b/entry/src/main/ets/controller/CameraController.ets index 9acb1d7dc93957d21b2fc6f1c1b6f4fa3c709bc0..9d6a555e8c68abac1e9352eb63a6fa8331cb61b9 100644 --- a/entry/src/main/ets/controller/CameraController.ets +++ b/entry/src/main/ets/controller/CameraController.ets @@ -24,6 +24,12 @@ import Logger from '../common/utils/Logger'; const TAG: string = "[CameraController]"; +interface FaceBoundingBox { + topLeftX: number; + topLeftY: number; + width: number; + height: number; +} let cameraInput: camera.CameraInput; let xComponentPreviewOutput: camera.PreviewOutput; @@ -359,23 +365,36 @@ export class CameraController { if (err !== undefined && err.code !== 0) { return; } + const faceBoundingBoxes: Array = []; + let unifiedTimestamp = 0; + let timestampSet = false; // Iterate through the array to find an object with type equal to 0 for (const metadataObject of metadataObjectArr) { if (metadataObject.type === 0) { + 
if (!timestampSet) { + unifiedTimestamp = metadataObject.timestamp; + timestampSet = true; + } + faceBoundingBoxes.push({ + topLeftX: metadataObject.boundingBox.topLeftX, + topLeftY: metadataObject.boundingBox.topLeftY, + width: metadataObject.boundingBox.width, + height: metadataObject.boundingBox.height + }) Logger.info(TAG, - `find the metadataObject whose type is 0, timestamp: ${metadataObject.timestamp}, ${metadataObject.boundingBox.topLeftX}, ${metadataObject.boundingBox.topLeftY}, ${metadataObject.boundingBox.width}, ${metadataObject.boundingBox.height}.`); - Logger.debug(TAG, `UpdateFaceInfoFromCamera isOpenROI=${this.isOpenROI}`) - recorder.UpdateFaceInfoFromCamera( - this.nativeRecorderObj, - this.isOpenROI, - metadataObject.timestamp, - metadataObject.boundingBox.topLeftX, - metadataObject.boundingBox.topLeftY, - metadataObject.boundingBox.width, - metadataObject.boundingBox.height) - break; + `find the metadataObject ROI whose type is 0, timestamp: ${metadataObject.timestamp}, ${metadataObject.boundingBox.topLeftX}, ${metadataObject.boundingBox.topLeftY}, ${metadataObject.boundingBox.width}, ${metadataObject.boundingBox.height}.`); + Logger.info(TAG, `UpdateFaceInfoFromCamera isOpenROI=${this.isOpenROI}`) } } + if (faceBoundingBoxes.length > 0) { + Logger.info(TAG, `ROI metadata output metadataObjectsAvailable -> UpdateFaceInfoFromCamera.`); + recorder.UpdateFaceInfoFromCamera( + this.nativeRecorderObj, + this.isOpenROI, + unifiedTimestamp, + faceBoundingBoxes + ); + } Logger.info(TAG, `metadata output metadataObjectsAvailable.`); }); }