author     Prateek Chaubey <chaubeyprateek@gmail.com>    2018-01-07 20:55:14 +0530
committer  Davide Garberi <dade.garberi@gmail.com>       2018-01-19 14:09:15 +0100
commit     6616278131edd80a12545085e06ee6b0e0a0a788 (patch)
tree       0aef88ed11809a9d67f6abe4dc2ff782a14737e2 /camera/QCamera2/HAL
parent     cc4ccf34871da343111bf68d16ba4e4c67cac1dc (diff)
msm8996-common: zuk: Import OSS Camera HAL
Tag: LA.HB.1.3.2-32600-8x96.0
Signed-off-by: Davide Garberi <dade.garberi@gmail.com>
Diffstat (limited to 'camera/QCamera2/HAL')
-rw-r--r--  camera/QCamera2/HAL/QCamera2HWI.cpp  10417
-rw-r--r--  camera/QCamera2/HAL/QCamera2HWI.h  795
-rw-r--r--  camera/QCamera2/HAL/QCamera2HWICallbacks.cpp  3512
-rw-r--r--  camera/QCamera2/HAL/QCameraAllocator.h  63
-rw-r--r--  camera/QCamera2/HAL/QCameraChannel.cpp  1601
-rw-r--r--  camera/QCamera2/HAL/QCameraChannel.h  171
-rwxr-xr-x  camera/QCamera2/HAL/QCameraMem.cpp  2448
-rw-r--r--  camera/QCamera2/HAL/QCameraMem.h  295
-rw-r--r--  camera/QCamera2/HAL/QCameraMuxer.cpp  2823
-rw-r--r--  camera/QCamera2/HAL/QCameraMuxer.h  284
-rw-r--r--  camera/QCamera2/HAL/QCameraParameters.cpp  14523
-rw-r--r--  camera/QCamera2/HAL/QCameraParameters.h  1234
-rw-r--r--  camera/QCamera2/HAL/QCameraParametersIntf.cpp  1421
-rw-r--r--  camera/QCamera2/HAL/QCameraParametersIntf.h  310
-rw-r--r--  camera/QCamera2/HAL/QCameraPostProc.cpp  3661
-rw-r--r--  camera/QCamera2/HAL/QCameraPostProc.h  250
-rw-r--r--  camera/QCamera2/HAL/QCameraStateMachine.cpp  3867
-rw-r--r--  camera/QCamera2/HAL/QCameraStateMachine.h  263
-rw-r--r--  camera/QCamera2/HAL/QCameraStream.cpp  2656
-rw-r--r--  camera/QCamera2/HAL/QCameraStream.h  272
-rw-r--r--  camera/QCamera2/HAL/QCameraThermalAdapter.cpp  177
-rw-r--r--  camera/QCamera2/HAL/QCameraThermalAdapter.h  91
-rw-r--r--  camera/QCamera2/HAL/android/QCamera2External.h  47
-rw-r--r--  camera/QCamera2/HAL/test/Android.mk  64
-rw-r--r--  camera/QCamera2/HAL/test/qcamera_test.cpp  3710
-rw-r--r--  camera/QCamera2/HAL/test/qcamera_test.h  361
-rw-r--r--  camera/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h  100
-rw-r--r--  camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h  49
-rw-r--r--  camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h  95
-rw-r--r--  camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h  46
-rw-r--r--  camera/QCamera2/HAL/wrapper/QualcommCamera.cpp  450
-rw-r--r--  camera/QCamera2/HAL/wrapper/QualcommCamera.h  107
32 files changed, 56163 insertions, 0 deletions
diff --git a/camera/QCamera2/HAL/QCamera2HWI.cpp b/camera/QCamera2/HAL/QCamera2HWI.cpp
new file mode 100644
index 0000000..c7f9d44
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWI.cpp
@@ -0,0 +1,10417 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+// To remove
+#include <cutils/properties.h>
+
+// System definitions
+#include <utils/Errors.h>
+#include <dlfcn.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "gralloc_priv.h"
+#include "native_handle.h"
+
+// Camera definitions
+#include "android/QCamera2External.h"
+#include "QCamera2HWI.h"
+#include "QCameraBufferMaps.h"
+#include "QCameraFlash.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define MAP_TO_DRIVER_COORDINATE(val, base, scale, offset) \
+ ((int32_t)val * (int32_t)scale / (int32_t)base + (int32_t)offset)
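+// A quick worked example of the mapping above, with purely illustrative
+// numbers that do not come from any caller in this file: for val = 250,
+// base = 1000, scale = 4000 and offset = 0,
+//   MAP_TO_DRIVER_COORDINATE(250, 1000, 4000, 0) = 250 * 4000 / 1000 + 0 = 1000
+// i.e. val is rescaled from the [0, base] range into [offset, offset + scale].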
+#define CAMERA_MIN_STREAMING_BUFFERS 3
+#define EXTRA_ZSL_PREVIEW_STREAM_BUF 2
+#define CAMERA_MIN_JPEG_ENCODING_BUFFERS 2
+#define CAMERA_MIN_VIDEO_BUFFERS 9
+#define CAMERA_MIN_CALLBACK_BUFFERS 5
+#define CAMERA_LONGSHOT_STAGES 4
+#define CAMERA_MIN_CAMERA_BATCH_BUFFERS 6
+#define CAMERA_ISP_PING_PONG_BUFFERS 2
+#define MIN_UNDEQUEUED_BUFFERS 1 // This is required if preview window is not set
+
+#define HDR_CONFIDENCE_THRESHOLD 0.4
+
+#define CAMERA_OPEN_PERF_TIME_OUT 500 // 500 milliseconds
+
+// Very long wait, just to be sure we don't deadlock
+#define CAMERA_DEFERRED_THREAD_TIMEOUT 5000000000 // 5 seconds
+#define CAMERA_DEFERRED_MAP_BUF_TIMEOUT 2000000000 // 2 seconds
+#define CAMERA_MIN_METADATA_BUFFERS 10 // Need at least 10 for ZSL snapshot
+#define CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS 5
+#define CAMERA_MAX_PARAM_APPLY_DELAY 3
+
+namespace qcamera {
+
+extern cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+extern pthread_mutex_t gCamLock;
+volatile uint32_t gCamHalLogLevel = 1;
+extern uint8_t gNumCameraSessions;
+uint32_t QCamera2HardwareInterface::sNextJobId = 1;
+
+camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
+ .set_preview_window = QCamera2HardwareInterface::set_preview_window,
+ .set_callbacks = QCamera2HardwareInterface::set_CallBacks,
+ .enable_msg_type = QCamera2HardwareInterface::enable_msg_type,
+ .disable_msg_type = QCamera2HardwareInterface::disable_msg_type,
+ .msg_type_enabled = QCamera2HardwareInterface::msg_type_enabled,
+
+ .start_preview = QCamera2HardwareInterface::start_preview,
+ .stop_preview = QCamera2HardwareInterface::stop_preview,
+ .preview_enabled = QCamera2HardwareInterface::preview_enabled,
+ .store_meta_data_in_buffers= QCamera2HardwareInterface::store_meta_data_in_buffers,
+
+ .start_recording = QCamera2HardwareInterface::start_recording,
+ .stop_recording = QCamera2HardwareInterface::stop_recording,
+ .recording_enabled = QCamera2HardwareInterface::recording_enabled,
+ .release_recording_frame = QCamera2HardwareInterface::release_recording_frame,
+
+ .auto_focus = QCamera2HardwareInterface::auto_focus,
+ .cancel_auto_focus = QCamera2HardwareInterface::cancel_auto_focus,
+
+ .take_picture = QCamera2HardwareInterface::take_picture,
+ .cancel_picture = QCamera2HardwareInterface::cancel_picture,
+
+ .set_parameters = QCamera2HardwareInterface::set_parameters,
+ .get_parameters = QCamera2HardwareInterface::get_parameters,
+ .put_parameters = QCamera2HardwareInterface::put_parameters,
+ .send_command = QCamera2HardwareInterface::send_command,
+
+ .release = QCamera2HardwareInterface::release,
+ .dump = QCamera2HardwareInterface::dump,
+};
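+// A minimal sketch of how the framework side typically drives this ops
+// table, assuming a camera_device_t obtained from the HAL module's open()
+// call; the variable names are hypothetical and not part of this file:
+//
+//   camera_device_t *dev = ...;  // returned by the module open() call
+//   dev->ops->set_preview_window(dev, window);
+//   dev->ops->start_preview(dev);
+//   ...
+//   dev->ops->stop_preview(dev);
+//   dev->ops->release(dev);
+//
+// Each entry forwards into the state machine below via processAPI() and
+// waitAPIResult().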
+
+/*===========================================================================
+ * FUNCTION : set_preview_window
+ *
+ * DESCRIPTION: set preview window.
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @window : window ops table
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
+ struct preview_stream_ops *window)
+{
+ ATRACE_CALL();
+ int rc = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d window = %p", hw->getCameraId(), window);
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
+ if (rc == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, &apiResult);
+ rc = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : set_CallBacks
+ *
+ * DESCRIPTION: set callbacks for notify and data
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @notify_cb : notify cb
+ * @data_cb : data cb
+ * @data_cb_timestamp : video data cb with timestamp
+ * @get_memory : ops table for requesting gralloc memory
+ * @user : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::set_CallBacks(struct camera_device *device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ qcamera_sm_evt_setcb_payload_t payload;
+ payload.notify_cb = notify_cb;
+ payload.data_cb = data_cb;
+ payload.data_cb_timestamp = data_cb_timestamp;
+ payload.get_memory = get_memory;
+ payload.user = user;
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t rc = hw->processAPI(QCAMERA_SM_EVT_SET_CALLBACKS, (void *)&payload);
+ if (rc == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_CALLBACKS, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION : enable_msg_type
+ *
+ * DESCRIPTION: enable certain msg type
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @msg_type : msg type mask
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t rc = hw->processAPI(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, (void *)&msg_type);
+ if (rc == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION : disable_msg_type
+ *
+ * DESCRIPTION: disable certain msg type
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @msg_type : msg type mask
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t rc = hw->processAPI(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, (void *)&msg_type);
+ if (rc == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION : msg_type_enabled
+ *
+ * DESCRIPTION: check if a certain msg type is enabled
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @msg_type : msg type mask
+ *
+ * RETURN : 1 -- enabled
+ * 0 -- not enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, (void *)&msg_type);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, &apiResult);
+ ret = apiResult.enabled;
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : prepare_preview
+ *
+ * DESCRIPTION: prepare preview
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::prepare_preview(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGH("[KPI Perf]: E PROFILE_PREPARE_PREVIEW camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_PREPARE_PREVIEW;
+ ret = hw->processAPI(evt, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(evt, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf]: X");
+ return ret;
+}
+
+
+/*===========================================================================
+ * FUNCTION : start_preview
+ *
+ * DESCRIPTION: start preview
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_preview(struct camera_device *device)
+{
+ KPI_ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("[KPI Perf]: E PROFILE_START_PREVIEW camera id %d",
+ hw->getCameraId());
+
+ // Release the timed perf lock acquired in openCamera
+ hw->m_perfLock.lock_rel_timed();
+
+ hw->m_perfLock.lock_acq();
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
+ if (hw->isNoDisplayMode()) {
+ evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
+ }
+ ret = hw->processAPI(evt, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(evt, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ hw->m_bPreviewStarted = true;
+ LOGI("[KPI Perf]: X ret = %d", ret);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stop_preview
+ *
+ * DESCRIPTION: stop preview
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
+{
+ KPI_ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGI("[KPI Perf]: E PROFILE_STOP_PREVIEW camera id %d",
+ hw->getCameraId());
+
+ // Disable power Hint for preview
+ hw->m_perfLock.powerHint(POWER_HINT_CAM_PREVIEW, false);
+
+ hw->m_perfLock.lock_acq();
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_PREVIEW, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_STOP_PREVIEW, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGI("[KPI Perf]: X ret = %d", ret);
+}
+
+/*===========================================================================
+ * FUNCTION : preview_enabled
+ *
+ * DESCRIPTION: check if preview is running
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : 1 -- running
+ * 0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_PREVIEW_ENABLED, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_PREVIEW_ENABLED, &apiResult);
+ ret = apiResult.enabled;
+ }
+
+ // if preview is enabled, sending of preview callbacks can be enabled
+ if(apiResult.enabled) {
+ hw->m_stateMachine.setPreviewCallbackNeeded(true);
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: set whether meta data needs to be stored in buffers for video frames
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @enable : flag if enable
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::store_meta_data_in_buffers(
+ struct camera_device *device, int enable)
+{
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, (void *)&enable);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : restart_start_preview
+ *
+ * DESCRIPTION: start preview as part of the restart preview
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_start_preview(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = hw->processAPI(QCAMERA_SM_EVT_RESTART_START_PREVIEW, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_RESTART_START_PREVIEW, &apiResult);
+ ret = apiResult.status;
+ }
+ } else {
+ LOGE("This function is not supposed to be called in single-camera mode");
+ ret = INVALID_OPERATION;
+ }
+ // Preview restart done, update the mPreviewRestartNeeded flag to false.
+ hw->mPreviewRestartNeeded = false;
+ hw->unlockAPI();
+ LOGI("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : restart_stop_preview
+ *
+ * DESCRIPTION: stop preview as part of the restart preview
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_stop_preview(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = hw->processAPI(QCAMERA_SM_EVT_RESTART_STOP_PREVIEW, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_RESTART_STOP_PREVIEW, &apiResult);
+ ret = apiResult.status;
+ }
+ } else {
+ LOGE("This function is not supposed to be called in single-camera mode");
+ ret = INVALID_OPERATION;
+ }
+
+ hw->unlockAPI();
+ LOGI("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : pre_start_recording
+ *
+ * DESCRIPTION: prepare for the start recording
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::pre_start_recording(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGH("[KPI Perf]: E PROFILE_PRE_START_RECORDING camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_PRE_START_RECORDING, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_PRE_START_RECORDING, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf]: X");
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : start_recording
+ *
+ * DESCRIPTION: start recording
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_recording(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("[KPI Perf]: E PROFILE_START_RECORDING camera id %d",
+ hw->getCameraId());
+ // Give HWI control to call pre_start_recording in single camera mode.
+ // In dual-cam mode, this control belongs to muxer.
+ if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = pre_start_recording(device);
+ if (ret != NO_ERROR) {
+ LOGE("pre_start_recording failed with ret = %d", ret);
+ return ret;
+ }
+ }
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_START_RECORDING, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_START_RECORDING, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ hw->m_bRecordStarted = true;
+ LOGI("[KPI Perf]: X ret = %d", ret);
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stop_recording
+ *
+ * DESCRIPTION: stop recording
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGI("[KPI Perf]: E PROFILE_STOP_RECORDING camera id %d",
+ hw->getCameraId());
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_STOP_RECORDING, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGI("[KPI Perf]: X ret = %d", ret);
+}
+
+/*===========================================================================
+ * FUNCTION : recording_enabled
+ *
+ * DESCRIPTION: check if recording is running
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : 1 -- running
+ * 0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_RECORDING_ENABLED, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_RECORDING_ENABLED, &apiResult);
+ ret = apiResult.enabled;
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : release_recording_frame
+ *
+ * DESCRIPTION: return recording frame back
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @opaque : ptr to frame to be returned
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release_recording_frame(
+ struct camera_device *device, const void *opaque)
+{
+ ATRACE_CALL();
+ int32_t ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ if (!opaque) {
+ LOGE("Error!! Frame info is NULL");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ //Close and delete duplicated native handle and FDs.
+ if (hw->mVideoMem != NULL) {
+ ret = hw->mVideoMem->closeNativeHandle(opaque,
+ hw->mStoreMetaDataInFrame > 0);
+ if (ret != NO_ERROR) {
+ LOGE("Invalid video metadata");
+ return;
+ }
+ } else {
+ LOGW("Possible FD leak. Release recording called after stop");
+ }
+
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION : auto_focus
+ *
+ * DESCRIPTION: start auto focus
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::auto_focus(struct camera_device *device)
+{
+ KPI_ATRACE_INT("Camera:AutoFocus", 1);
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGH("[KPI Perf] : E PROFILE_AUTO_FOCUS camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_START_AUTO_FOCUS, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_START_AUTO_FOCUS, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf] : X ret = %d", ret);
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGH("[KPI Perf] : E PROFILE_CANCEL_AUTO_FOCUS camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf] : X ret = %d", ret);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : pre_take_picture
+ *
+ * DESCRIPTION: pre take picture, restart preview if necessary.
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::pre_take_picture(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGH("[KPI Perf]: E PROFILE_PRE_TAKE_PICTURE camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_PRE_TAKE_PICTURE, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_PRE_TAKE_PICTURE, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf]: X");
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : take_picture
+ *
+ * DESCRIPTION: take picture
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::take_picture(struct camera_device *device)
+{
+ KPI_ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("[KPI Perf]: E PROFILE_TAKE_PICTURE camera id %d",
+ hw->getCameraId());
+ if (!hw->mLongshotEnabled) {
+ hw->m_perfLock.lock_acq();
+ }
+ qcamera_api_result_t apiResult;
+
+ /** Added support for Retro-active Frames:
+ * takePicture() is called before preparing the snapshot, to indicate to the
+ * mm-camera-channel that it should pick up legacy frames even
+ * before LED estimation is triggered.
+ */
+
+ LOGH("isLiveSnap %d, isZSL %d, isHDR %d longshot = %d",
+ hw->isLiveSnapshot(), hw->isZSLMode(), hw->isHDRMode(),
+ hw->isLongshotEnabled());
+
+ // Check for Retro-active Frames
+ if ((hw->mParameters.getNumOfRetroSnapshots() > 0) &&
+ !hw->isLiveSnapshot() && hw->isZSLMode() &&
+ !hw->isHDRMode() && !hw->isLongshotEnabled()) {
+ // Set Retro Picture Mode
+ hw->setRetroPicture(1);
+ hw->m_bLedAfAecLock = 0;
+ LOGL("Retro Enabled");
+
+ // Give HWI control to call pre_take_picture in single camera mode.
+ // In dual-cam mode, this control belongs to muxer.
+ if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = pre_take_picture(device);
+ if (ret != NO_ERROR) {
+ LOGE("pre_take_picture failed with ret = %d",ret);
+ return ret;
+ }
+ }
+
+ /* Call take Picture for total number of snapshots required.
+ This includes the number of retro frames and normal frames */
+ hw->lockAPI();
+ ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+ if (ret == NO_ERROR) {
+ // Wait for retro frames, before calling prepare snapshot
+ LOGD("Wait for Retro frames to be done");
+ hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+ ret = apiResult.status;
+ }
+ /* Unlock API since it is acquired in prepare snapshot separately */
+ hw->unlockAPI();
+
+ /* Prepare snapshot in case LED needs to be flashed */
+ LOGD("Start Prepare Snapshot");
+ ret = hw->prepare_snapshot(device);
+ }
+ else {
+ hw->setRetroPicture(0);
+ // Check if prepare snapshot is done
+ if (!hw->mPrepSnapRun) {
+ // Ignore the status from prepare_snapshot
+ hw->prepare_snapshot(device);
+ }
+
+ // Give HWI control to call pre_take_picture in single camera mode.
+ // In dual-cam mode, this control belongs to muxer.
+ if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = pre_take_picture(device);
+ if (ret != NO_ERROR) {
+ LOGE("pre_take_picture failed with ret = %d",ret);
+ return ret;
+ }
+ }
+
+ // Regardless of the result value of prepare_snapshot,
+ // go ahead with capture anyway, just like the way autofocus
+ // is handled in the capture case
+ /* capture */
+ LOGL("Capturing normal frames");
+ hw->lockAPI();
+ ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ if (!hw->isLongshotEnabled()){
+ // For longshot mode, we prepare snapshot only once
+ hw->mPrepSnapRun = false;
+ }
+ }
+ LOGI("[KPI Perf]: X ret = %d", ret);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : cancel_picture
+ *
+ * DESCRIPTION: cancel current take picture request
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("[KPI Perf]: E PROFILE_CANCEL_PICTURE camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_CANCEL_PICTURE, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGI("[KPI Perf]: X camera id %d ret = %d", hw->getCameraId(), ret);
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : set_parameters
+ *
+ * DESCRIPTION: set camera parameters
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @parms : string of packed parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
+ const char *parms)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS, &apiResult);
+ ret = apiResult.status;
+ }
+
+ // Give HWI control to restart (if necessary) after set params
+ // in single camera mode. In dual-cam mode, this control belongs to muxer.
+ if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ if ((ret == NO_ERROR) && hw->getNeedRestart()) {
+ LOGD("stopping after param change");
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_STOP, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_STOP, &apiResult);
+ ret = apiResult.status;
+ }
+ }
+
+ if (ret == NO_ERROR) {
+ LOGD("committing param change");
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, &apiResult);
+ ret = apiResult.status;
+ }
+ }
+
+ if ((ret == NO_ERROR) && hw->getNeedRestart()) {
+ LOGD("restarting after param change");
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_RESTART, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_RESTART, &apiResult);
+ ret = apiResult.status;
+ }
+ }
+ }
+
+ hw->unlockAPI();
+ LOGD("X camera id %d ret %d", hw->getCameraId(), ret);
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stop_after_set_params
+ *
+ * DESCRIPTION: stop after a set param call, if necessary
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stop_after_set_params(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_STOP, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_STOP, &apiResult);
+ ret = apiResult.status;
+ }
+ } else {
+ LOGE("is not supposed to be called in single-camera mode");
+ ret = INVALID_OPERATION;
+ }
+
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : commit_params
+ *
+ * DESCRIPTION: commit after a set param call
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::commit_params(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, &apiResult);
+ ret = apiResult.status;
+ }
+ } else {
+ LOGE("is not supposed to be called in single-camera mode");
+ ret = INVALID_OPERATION;
+ }
+
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : restart_after_set_params
+ *
+ * DESCRIPTION: restart after a set param call, if necessary
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_after_set_params(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_RESTART, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_RESTART, &apiResult);
+ ret = apiResult.status;
+ }
+ } else {
+ LOGE("is not supposed to be called in single-camera mode");
+ ret = INVALID_OPERATION;
+ }
+
+ hw->unlockAPI();
+ LOGD("X camera id %d", hw->getCameraId());
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : get_parameters
+ *
+ * DESCRIPTION: query camera parameters
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : packed parameters in a string
+ *==========================================================================*/
+char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
+{
+ ATRACE_CALL();
+ char *ret = NULL;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return NULL;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t rc = hw->processAPI(QCAMERA_SM_EVT_GET_PARAMS, NULL);
+ if (rc == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_GET_PARAMS, &apiResult);
+ ret = apiResult.params;
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : put_parameters
+ *
+ * DESCRIPTION: return camera parameters string back to HAL
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @parm : ptr to parameter string to be returned
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
+ char *parm)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t ret = hw->processAPI(QCAMERA_SM_EVT_PUT_PARAMS, (void *)parm);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_PUT_PARAMS, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION : send_command
+ *
+ * DESCRIPTION: command to be executed
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @cmd : cmd to be executed
+ * @arg1 : ptr to optional argument1
+ * @arg2 : ptr to optional argument2
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command(struct camera_device *device,
+ int32_t cmd,
+ int32_t arg1,
+ int32_t arg2)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+
+ qcamera_sm_evt_command_payload_t payload;
+ memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+ payload.cmd = cmd;
+ payload.arg1 = arg1;
+ payload.arg2 = arg2;
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND, (void *)&payload);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : send_command_restart
+ *
+ * DESCRIPTION: restart if necessary after a send_command
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @cmd : cmd to be executed
+ * @arg1 : ptr to optional argument1
+ * @arg2 : ptr to optional argument2
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command_restart(struct camera_device *device,
+ int32_t cmd,
+ int32_t arg1,
+ int32_t arg2)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+
+ qcamera_sm_evt_command_payload_t payload;
+ memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+ payload.cmd = cmd;
+ payload.arg1 = arg1;
+ payload.arg2 = arg2;
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND_RESTART, (void *)&payload);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND_RESTART, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : release
+ *
+ * DESCRIPTION: release camera resource
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release(struct camera_device *device)
+{
+ ATRACE_CALL();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE, &apiResult);
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION : dump
+ *
+ * DESCRIPTION: dump camera status
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @fd : fd for status to be dumped to
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(struct camera_device *device, int fd)
+{
+ int ret = NO_ERROR;
+
+ //Log level property is read when "adb shell dumpsys media.camera" is
+ //called so that the log level can be controlled without restarting
+ //media server
+ getLogLevel();
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_DUMP, (void *)&fd);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_DUMP, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : close_camera_device
+ *
+ * DESCRIPTION: close camera device
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
+{
+ KPI_ATRACE_CALL();
+ int ret = NO_ERROR;
+
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(
+ reinterpret_cast<camera_device_t *>(hw_dev)->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGI("[KPI Perf]: E camera id %d", hw->getCameraId());
+ delete hw;
+ LOGI("[KPI Perf]: X");
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : register_face_image
+ *
+ * DESCRIPTION: register a face image into imaging lib for face authentication/
+ * face recognition
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ * @img_ptr : ptr to image buffer
+ * @config : ptr to config about input image, e.g., format, dimension, etc.
+ *
+ * RETURN : >=0 unique ID of face registered.
+ * <0 failure.
+ *==========================================================================*/
+int QCamera2HardwareInterface::register_face_image(struct camera_device *device,
+ void *img_ptr,
+ cam_pp_offline_src_config_t *config)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ LOGD("E camera id %d", hw->getCameraId());
+ qcamera_sm_evt_reg_face_payload_t payload;
+ memset(&payload, 0, sizeof(qcamera_sm_evt_reg_face_payload_t));
+ payload.img_ptr = img_ptr;
+ payload.config = config;
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+ ret = hw->processAPI(QCAMERA_SM_EVT_REG_FACE_IMAGE, (void *)&payload);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_REG_FACE_IMAGE, &apiResult);
+ ret = apiResult.handle;
+ }
+ hw->unlockAPI();
+ LOGD("E camera id %d", hw->getCameraId());
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : prepare_snapshot
+ *
+ * DESCRIPTION: prepares hardware for snapshot
+ *
+ * PARAMETERS :
+ * @device : ptr to camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::prepare_snapshot(struct camera_device *device)
+{
+ ATRACE_CALL();
+ int ret = NO_ERROR;
+ QCamera2HardwareInterface *hw =
+ reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+ if (!hw) {
+ LOGE("NULL camera device");
+ return BAD_VALUE;
+ }
+ if (hw->isLongshotEnabled() && hw->mPrepSnapRun == true) {
+ // For longshot mode, we prepare snapshot only once
+ LOGH("prepare snapshot only once ");
+ return NO_ERROR;
+ }
+ LOGH("[KPI Perf]: E PROFILE_PREPARE_SNAPSHOT camera id %d",
+ hw->getCameraId());
+ hw->lockAPI();
+ qcamera_api_result_t apiResult;
+
+ /* Prepare snapshot in case LED needs to be flashed */
+ if (hw->mFlashNeeded || hw->mParameters.isChromaFlashEnabled()) {
+ /* Prepare snapshot in case LED needs to be flashed */
+ ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+ if (ret == NO_ERROR) {
+ hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
+ ret = apiResult.status;
+ }
+ hw->mPrepSnapRun = true;
+ }
+ hw->unlockAPI();
+ LOGH("[KPI Perf]: X, ret: %d", ret);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : QCamera2HardwareInterface
+ *
+ * DESCRIPTION: constructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS :
+ * @cameraId : camera ID
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCamera2HardwareInterface::QCamera2HardwareInterface(uint32_t cameraId)
+ : mCameraId(cameraId),
+ mCameraHandle(NULL),
+ mCameraOpened(false),
+ m_bRelCamCalibValid(false),
+ mPreviewWindow(NULL),
+ mMsgEnabled(0),
+ mStoreMetaDataInFrame(0),
+ mJpegCb(NULL),
+ mCallbackCookie(NULL),
+ mJpegCallbackCookie(NULL),
+ m_bMpoEnabled(TRUE),
+ m_stateMachine(this),
+ m_smThreadActive(true),
+ m_postprocessor(this),
+ m_thermalAdapter(QCameraThermalAdapter::getInstance()),
+ m_cbNotifier(this),
+ m_bPreviewStarted(false),
+ m_bRecordStarted(false),
+ m_currentFocusState(CAM_AF_STATE_INACTIVE),
+ mDumpFrmCnt(0U),
+ mDumpSkipCnt(0U),
+ mThermalLevel(QCAMERA_THERMAL_NO_ADJUSTMENT),
+ mActiveAF(false),
+ m_HDRSceneEnabled(false),
+ mLongshotEnabled(false),
+ mLiveSnapshotThread(0),
+ mIntPicThread(0),
+ mFlashNeeded(false),
+ mDeviceRotation(0U),
+ mCaptureRotation(0U),
+ mJpegExifRotation(0U),
+ mUseJpegExifRotation(false),
+ mIs3ALocked(false),
+ mPrepSnapRun(false),
+ mZoomLevel(0),
+ mPreviewRestartNeeded(false),
+ mVFrameCount(0),
+ mVLastFrameCount(0),
+ mVLastFpsTime(0),
+ mVFps(0),
+ mPFrameCount(0),
+ mPLastFrameCount(0),
+ mPLastFpsTime(0),
+ mPFps(0),
+ mInstantAecFrameCount(0),
+ m_bIntJpegEvtPending(false),
+ m_bIntRawEvtPending(false),
+ mReprocJob(0),
+ mJpegJob(0),
+ mMetadataAllocJob(0),
+ mInitPProcJob(0),
+ mParamAllocJob(0),
+ mParamInitJob(0),
+ mOutputCount(0),
+ mInputCount(0),
+ mAdvancedCaptureConfigured(false),
+ mHDRBracketingEnabled(false),
+ mNumPreviewFaces(-1),
+ mJpegClientHandle(0),
+ mJpegHandleOwner(false),
+ mMetadataMem(NULL),
+ mVideoMem(NULL),
+ mCACDoneReceived(false),
+ m_bNeedRestart(false)
+{
+#ifdef TARGET_TS_MAKEUP
+ memset(&mFaceRect, -1, sizeof(mFaceRect));
+#endif
+ getLogLevel();
+ ATRACE_CALL();
+ mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+ mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+ mCameraDevice.common.close = close_camera_device;
+ mCameraDevice.ops = &mCameraOps;
+ mCameraDevice.priv = this;
+
+ pthread_mutex_init(&m_lock, NULL);
+ pthread_cond_init(&m_cond, NULL);
+
+ m_apiResultList = NULL;
+
+ pthread_mutex_init(&m_evtLock, NULL);
+ pthread_cond_init(&m_evtCond, NULL);
+ memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+
+
+ pthread_mutex_init(&m_int_lock, NULL);
+ pthread_cond_init(&m_int_cond, NULL);
+
+ memset(m_channels, 0, sizeof(m_channels));
+
+ memset(&mExifParams, 0, sizeof(mm_jpeg_exif_params_t));
+
+ memset(m_BackendFileName, 0, QCAMERA_MAX_FILEPATH_LENGTH);
+
+ memset(mDefOngoingJobs, 0, sizeof(mDefOngoingJobs));
+ memset(&mJpegMetadata, 0, sizeof(mJpegMetadata));
+ memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+ memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+
+ mDeferredWorkThread.launch(deferredWorkRoutine, this);
+ mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+ m_perfLock.lock_init();
+
+ pthread_mutex_init(&mGrallocLock, NULL);
+ mEnqueuedBuffers = 0;
+ mFrameSkipStart = 0;
+ mFrameSkipEnd = 0;
+ mLastPreviewFrameID = 0;
+
+ //Load and read GPU library.
+ lib_surface_utils = NULL;
+ LINK_get_surface_pixel_alignment = NULL;
+ mSurfaceStridePadding = CAM_PAD_TO_32;
+ lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
+ if (lib_surface_utils) {
+ *(void **)&LINK_get_surface_pixel_alignment =
+ dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
+ if (LINK_get_surface_pixel_alignment) {
+ mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
+ }
+ dlclose(lib_surface_utils);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCamera2HardwareInterface
+ *
+ * DESCRIPTION: destructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCamera2HardwareInterface::~QCamera2HardwareInterface()
+{
+ LOGH("E");
+
+ mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+ mDeferredWorkThread.exit();
+
+ if (mMetadataMem != NULL) {
+ delete mMetadataMem;
+ mMetadataMem = NULL;
+ }
+
+ m_perfLock.lock_acq();
+ lockAPI();
+ m_smThreadActive = false;
+ unlockAPI();
+ m_stateMachine.releaseThread();
+ closeCamera();
+ m_perfLock.lock_rel();
+ m_perfLock.lock_deinit();
+ pthread_mutex_destroy(&m_lock);
+ pthread_cond_destroy(&m_cond);
+ pthread_mutex_destroy(&m_evtLock);
+ pthread_cond_destroy(&m_evtCond);
+ pthread_mutex_destroy(&m_int_lock);
+ pthread_cond_destroy(&m_int_cond);
+ pthread_mutex_destroy(&mGrallocLock);
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : deferPPInit
+ *
+ * DESCRIPTION: Queue postproc init task to deferred thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : uint32_t job id of pproc init job
+ * 0 -- failure
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::deferPPInit()
+{
+ // init pproc
+ DeferWorkArgs args;
+ DeferPProcInitArgs pprocInitArgs;
+
+ memset(&args, 0, sizeof(DeferWorkArgs));
+ memset(&pprocInitArgs, 0, sizeof(DeferPProcInitArgs));
+
+ pprocInitArgs.jpeg_cb = jpegEvtHandle;
+ pprocInitArgs.user_data = this;
+ args.pprocInitArgs = pprocInitArgs;
+
+ return queueDeferredWork(CMD_DEF_PPROC_INIT,
+ args);
+}
+
+/*===========================================================================
+ * FUNCTION : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ * @hw_device : double ptr for camera device struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+ KPI_ATRACE_CALL();
+ int rc = NO_ERROR;
+ if (mCameraOpened) {
+ *hw_device = NULL;
+ LOGE("Permission Denied");
+ return PERMISSION_DENIED;
+ }
+ LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
+ mCameraId);
+ m_perfLock.lock_acq_timed(CAMERA_OPEN_PERF_TIME_OUT);
+ rc = openCamera();
+ if (rc == NO_ERROR){
+ *hw_device = &mCameraDevice.common;
+ if (m_thermalAdapter.init(this) != 0) {
+ LOGW("Init thermal adapter failed");
+ }
+ }
+ else
+ *hw_device = NULL;
+
+ LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
+ mCameraId, rc);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera()
+{
+ int32_t rc = NO_ERROR;
+ char value[PROPERTY_VALUE_MAX];
+
+ if (mCameraHandle) {
+ LOGE("Failure: Camera already opened");
+ return ALREADY_EXISTS;
+ }
+
+ rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
+ if (rc < 0) {
+ LOGE("Failed to reserve flash for camera id: %d",
+ mCameraId);
+ return UNKNOWN_ERROR;
+ }
+
+ // alloc param buffer
+ DeferWorkArgs args;
+ memset(&args, 0, sizeof(args));
+ mParamAllocJob = queueDeferredWork(CMD_DEF_PARAM_ALLOC, args);
+ if (mParamAllocJob == 0) {
+ LOGE("Failed queueing PARAM_ALLOC job");
+ return -ENOMEM;
+ }
+
+ if (gCamCapability[mCameraId] != NULL) {
+ // allocate metadata buffers
+ DeferWorkArgs args;
+ DeferMetadataAllocArgs metadataAllocArgs;
+
+ memset(&args, 0, sizeof(args));
+ memset(&metadataAllocArgs, 0, sizeof(metadataAllocArgs));
+
+ uint32_t padding =
+ gCamCapability[mCameraId]->padding_info.plane_padding;
+ metadataAllocArgs.size = PAD_TO_SIZE(sizeof(metadata_buffer_t),
+ padding);
+ metadataAllocArgs.bufferCnt = CAMERA_MIN_METADATA_BUFFERS;
+ args.metadataAllocArgs = metadataAllocArgs;
+
+ mMetadataAllocJob = queueDeferredWork(CMD_DEF_METADATA_ALLOC, args);
+ if (mMetadataAllocJob == 0) {
+ LOGE("Failed to allocate metadata buffer");
+ rc = -ENOMEM;
+ goto error_exit1;
+ }
+
+ rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
+ if (rc) {
+ LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
+ rc, mCameraHandle);
+ goto error_exit2;
+ }
+
+ mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+ camEvtHandle,
+ (void *) this);
+ } else {
+ LOGH("Capabilities not inited, initializing now.");
+
+ rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
+ if (rc) {
+ LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
+ rc, mCameraHandle);
+ goto error_exit2;
+ }
+
+ if(NO_ERROR != initCapabilities(mCameraId,mCameraHandle)) {
+ LOGE("initCapabilities failed.");
+ rc = UNKNOWN_ERROR;
+ goto error_exit3;
+ }
+
+ mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+ camEvtHandle,
+ (void *) this);
+ }
+
+ // Init params in the background
+ // 1. It's safe to queue init job, even if alloc job is not yet complete.
+ // It will be queued to the same thread, so the alloc is guaranteed to
+ // finish first.
+ // 2. However, it is not safe to begin param init until after camera is
+ // open. That is why we wait until after camera open completes to schedule
+ // this task.
+ memset(&args, 0, sizeof(args));
+ mParamInitJob = queueDeferredWork(CMD_DEF_PARAM_INIT, args);
+ if (mParamInitJob == 0) {
+ LOGE("Failed queuing PARAM_INIT job");
+ rc = -ENOMEM;
+ goto error_exit3;
+ }
+
+ mCameraOpened = true;
+
+ //Notify display HAL that a camera session is active.
+ //But avoid calling the same during bootup because camera service might open/close
+ //cameras at boot time during its initialization and display service will also internally
+ //wait for camera service to initialize first while calling this display API, resulting in a
+ //deadlock situation. Since boot time camera open/close calls are made only to fetch
+ //capabilities, no need of this display bw optimization.
+ //Use "service.bootanim.exit" property to know boot status.
+ property_get("service.bootanim.exit", value, "0");
+ if (atoi(value) == 1) {
+ pthread_mutex_lock(&gCamLock);
+ if (gNumCameraSessions++ == 0) {
+ setCameraLaunchStatus(true);
+ }
+ pthread_mutex_unlock(&gCamLock);
+ }
+
+ return NO_ERROR;
+
+error_exit3:
+ if(mJpegClientHandle) {
+ deinitJpegHandle();
+ }
+ mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+ mCameraHandle = NULL;
+error_exit2:
+ waitDeferredWork(mMetadataAllocJob);
+error_exit1:
+ waitDeferredWork(mParamAllocJob);
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : bundleRelatedCameras
+ *
+ * DESCRIPTION: bundle related cameras to enable frame sync between them
+ *
+ * PARAMETERS :
+ * @syncOn : indicates whether syncing is On or Off
+ * @sessionid : session id of the other camera session
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::bundleRelatedCameras(bool syncOn,
+ uint32_t sessionid)
+{
+ LOGD("bundleRelatedCameras sync %d with sessionid %d",
+ syncOn, sessionid);
+
+ int32_t rc = mParameters.bundleRelatedCameras(syncOn, sessionid);
+ if (rc != NO_ERROR) {
+ LOGE("bundleRelatedCameras failed %d", rc);
+ return rc;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getCameraSessionId
+ *
+ * DESCRIPTION: gets the backend session Id of this HWI instance
+ *
+ * PARAMETERS :
+ * @session_id : pointer to the output session id
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCameraSessionId(uint32_t* session_id)
+{
+ int32_t rc = NO_ERROR;
+
+ if(session_id != NULL) {
+ rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
+ session_id);
+ LOGD("Getting Camera Session Id %d", *session_id);
+ } else {
+ LOGE("Session Id is Null");
+ return UNKNOWN_ERROR;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : isFrameSyncEnabled
+ *
+ * DESCRIPTION: returns whether frame sync is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool indicating whether frame sync is enabled
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isFrameSyncEnabled(void)
+{
+ return mParameters.isFrameSyncEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION : setFrameSyncEnabled
+ *
+ * DESCRIPTION: sets whether frame sync is enabled
+ *
+ * PARAMETERS :
+ * @enable : flag whether to enable or disable frame sync
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFrameSyncEnabled(bool enable)
+{
+ return mParameters.setFrameSyncEnabled(enable);
+}
+
+/*===========================================================================
+ * FUNCTION : getRelatedCamSyncInfo
+ *
+ * DESCRIPTION: returns the related cam sync info for this HWI instance
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : const pointer to cam_sync_related_sensors_event_info_t
+ *==========================================================================*/
+const cam_sync_related_sensors_event_info_t*
+ QCamera2HardwareInterface::getRelatedCamSyncInfo(void)
+{
+ return mParameters.getRelatedCamSyncInfo();
+}
+
+/*===========================================================================
+ * FUNCTION : setRelatedCamSyncInfo
+ *
+ * DESCRIPTION: sets the related cam sync info for this HWI instance
+ *
+ * PARAMETERS :
+ * @info : ptr to related cam info parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info)
+{
+ if(info) {
+ return mParameters.setRelatedCamSyncInfo(info);
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMpoComposition
+ *
+ * DESCRIPTION: function to retrieve whether Mpo composition should be enabled
+ * or not
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool indicates whether mpo composition is enabled or not
+ *==========================================================================*/
+bool QCamera2HardwareInterface::getMpoComposition(void)
+{
+ LOGH("MpoComposition:%d ", m_bMpoEnabled);
+ return m_bMpoEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION : setMpoComposition
+ *
+ * DESCRIPTION: set if Mpo composition should be enabled for this HWI instance
+ *
+ * PARAMETERS :
+ * @enable : indicates whether Mpo composition enabled or not
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setMpoComposition(bool enable)
+{
+ // By default set Mpo composition to disabled
+ m_bMpoEnabled = false;
+
+ // Enable Mpo composition only if
+ // 1) frame sync is ON between the two cameras,
+ // 2) no advanced (AOST) features are enabled,
+ // 3) not in recording mode (for the liveshot case),
+ // 4) flash is not needed, and
+ // 5) longshot is not enabled
+ if ((getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) &&
+ !mParameters.isAdvCamFeaturesEnabled() &&
+ !mParameters.getRecordingHintValue() &&
+ !mFlashNeeded &&
+ !isLongshotEnabled()) {
+ m_bMpoEnabled = enable;
+ LOGH("MpoComposition:%d ", m_bMpoEnabled);
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getRecordingHintValue
+ *
+ * DESCRIPTION: function to retrieve recording hint value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool indicates whether recording hint is enabled or not
+ *==========================================================================*/
+bool QCamera2HardwareInterface::getRecordingHintValue(void)
+{
+ return mParameters.getRecordingHintValue();
+}
+
+/*===========================================================================
+ * FUNCTION : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint value
+ *
+ * PARAMETERS :
+ * @value : video hint value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setRecordingHintValue(int32_t value)
+{
+ return mParameters.updateRecordingHintValue(value);
+}
+
+/*===========================================================================
+ * FUNCTION : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::closeCamera()
+{
+ int rc = NO_ERROR;
+ int i;
+ char value[PROPERTY_VALUE_MAX];
+ LOGI("E");
+ if (!mCameraOpened) {
+ return NO_ERROR;
+ }
+ LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
+ mCameraId);
+
+ // set open flag to false
+ mCameraOpened = false;
+
+ // Reset Stream config info
+ mParameters.setStreamConfigure(false, false, true);
+
+ // deinit Parameters
+ mParameters.deinit();
+
+ // exit notifier
+ m_cbNotifier.exit();
+
+ // stop and deinit postprocessor
+ waitDeferredWork(mReprocJob);
+ // Close the JPEG session
+ waitDeferredWork(mJpegJob);
+ m_postprocessor.stop();
+ deinitJpegHandle();
+ m_postprocessor.deinit();
+ mInitPProcJob = 0; // reset job id, so pproc can be reinited later
+
+ m_thermalAdapter.deinit();
+
+ // delete all channels if not already deleted
+ for (i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+ if (m_channels[i] != NULL) {
+ m_channels[i]->stop();
+ delete m_channels[i];
+ m_channels[i] = NULL;
+ }
+ }
+
+ //free all pending api results here
+ if(m_apiResultList != NULL) {
+ api_result_list *apiResultList = m_apiResultList;
+ api_result_list *apiResultListNext;
+ while (apiResultList != NULL) {
+ apiResultListNext = apiResultList->next;
+ free(apiResultList);
+ apiResultList = apiResultListNext;
+ }
+ }
+
+ rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+ mCameraHandle = NULL;
+
+ //Notify display HAL that there is no active camera session
+ //but avoid calling the same during bootup. Refer to openCamera
+ //for more details.
+ property_get("service.bootanim.exit", value, "0");
+ if (atoi(value) == 1) {
+ pthread_mutex_lock(&gCamLock);
+ if (--gNumCameraSessions == 0) {
+ setCameraLaunchStatus(false);
+ }
+ pthread_mutex_unlock(&gCamLock);
+ }
+
+ if (mExifParams.debug_params) {
+ free(mExifParams.debug_params);
+ mExifParams.debug_params = NULL;
+ }
+
+ if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
+ LOGD("Failed to release flash for camera id: %d",
+ mCameraId);
+ }
+
+ LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
+ mCameraId, rc);
+
+ return rc;
+}
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+/*===========================================================================
+ * FUNCTION : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ * @cameraId : camera Id
+ * @cameraHandle : camera vtbl handle used to map the capability buffer
+ * and query capabilities
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::initCapabilities(uint32_t cameraId,
+ mm_camera_vtbl_t *cameraHandle)
+{
+ ATRACE_CALL();
+ int rc = NO_ERROR;
+ QCameraHeapMemory *capabilityHeap = NULL;
+
+ /* Allocate memory for capability buffer */
+ capabilityHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+ rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), NON_SECURE);
+ if(rc != OK) {
+ LOGE("No memory for cappability");
+ goto allocate_failed;
+ }
+
+ /* Map memory for capability buffer */
+ memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+
+ cam_buf_map_type_list bufMapList;
+ rc = QCameraBufferMaps::makeSingletonBufMapList(
+ CAM_MAPPING_BUF_TYPE_CAPABILITY,
+ 0 /*stream id*/, 0 /*buffer index*/, -1 /*plane index*/,
+ 0 /*cookie*/, capabilityHeap->getFd(0), sizeof(cam_capability_t),
+ bufMapList);
+
+ if (rc == NO_ERROR) {
+ rc = cameraHandle->ops->map_bufs(cameraHandle->camera_handle,
+ &bufMapList);
+ }
+
+ if(rc < 0) {
+ LOGE("failed to map capability buffer");
+ goto map_failed;
+ }
+
+ /* Query Capability */
+ rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+ if(rc < 0) {
+ LOGE("failed to query capability");
+ goto query_failed;
+ }
+ gCamCapability[cameraId] =
+ (cam_capability_t *)malloc(sizeof(cam_capability_t));
+
+ if (!gCamCapability[cameraId]) {
+ LOGE("out of memory");
+ goto query_failed;
+ }
+ memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+ sizeof(cam_capability_t));
+
+ int index;
+ for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
+ cam_analysis_info_t *p_analysis_info =
+ &gCamCapability[cameraId]->analysis_info[index];
+ p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
+ p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
+ }
+
+ rc = NO_ERROR;
+
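+ // The success path intentionally falls through the cleanup labels below:
+ // the capability buffer is unmapped and the heap freed once its contents
+ // have been copied into gCamCapability[cameraId].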
+query_failed:
+ cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+ CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+ capabilityHeap->deallocate();
+ delete capabilityHeap;
+allocate_failed:
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getCapabilities
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ * @cameraId : camera Id
+ * @info : camera info struct to be filled in with camera capabilities
+ * @p_cam_type : output for the camera's sync type, filled by get_cam_info()
+ *
+ * RETURN : int type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCapabilities(uint32_t cameraId,
+ struct camera_info *info, cam_sync_type_t *p_cam_type)
+{
+ ATRACE_CALL();
+ int rc = NO_ERROR;
+ struct camera_info *p_info = NULL;
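+ // Look up the camera_info via get_cam_info() under gCamLock, patch in the
+ // HAL1 device version and a NULL static_camera_characteristics pointer,
+ // then copy the result out to the caller.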
+ pthread_mutex_lock(&gCamLock);
+ p_info = get_cam_info(cameraId, p_cam_type);
+ p_info->device_version = CAMERA_DEVICE_API_VERSION_1_0;
+ p_info->static_camera_characteristics = NULL;
+ memcpy(info, p_info, sizeof (struct camera_info));
+ pthread_mutex_unlock(&gCamLock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getCamHalCapabilities
+ *
+ * DESCRIPTION: get the HAL capabilities structure
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : capability structure of respective camera
+ *
+ *==========================================================================*/
+cam_capability_t* QCamera2HardwareInterface::getCamHalCapabilities()
+{
+ return gCamCapability[mCameraId];
+}
+
+/*===========================================================================
+ * FUNCTION : getBufNumRequired
+ *
+ * DESCRIPTION: return number of stream buffers needed for given stream type
+ *
+ * PARAMETERS :
+ * @stream_type : type of stream
+ *
+ * RETURN : number of buffers needed
+ *==========================================================================*/
+uint8_t QCamera2HardwareInterface::getBufNumRequired(cam_stream_type_t stream_type)
+{
+ int bufferCnt = 0;
+ int minCaptureBuffers = mParameters.getNumOfSnapshots();
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+ int persist_cnt = 0;
+
+ int zslQBuffers = mParameters.getZSLQueueDepth();
+
+ int minCircularBufNum = mParameters.getMaxUnmatchedFramesInQueue() +
+ CAMERA_MIN_JPEG_ENCODING_BUFFERS;
+
+ int maxStreamBuf = minCaptureBuffers + mParameters.getMaxUnmatchedFramesInQueue() +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getNumOfExtraBuffersForImageProc() +
+ EXTRA_ZSL_PREVIEW_STREAM_BUF;
+
+ int minUndequeCount = 0;
+ if (!isNoDisplayMode()) {
+ if(mPreviewWindow != NULL) {
+ if (mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,&minUndequeCount)
+ != 0) {
+ LOGW("get_min_undequeued_buffer_count failed");
+ //TODO: hardcoded because MIN_UNDEQUEUED_BUFFERS not defined
+ //minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
+ minUndequeCount = MIN_UNDEQUEUED_BUFFERS;
+ }
+ } else {
+ //preview window might not be set at this point. So, query directly
+ //from BufferQueue implementation of gralloc buffers.
+ //minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
+ //hardcoded because MIN_UNDEQUEUED_BUFFERS not defined. REVISIT
+ minUndequeCount = MIN_UNDEQUEUED_BUFFERS;
+ }
+ if (minUndequeCount != MIN_UNDEQUEUED_BUFFERS) {
+ // minUndequeCount reported by the preview window differs from the
+ // hardcoded MIN_UNDEQUEUED_BUFFERS macro; warn so the macro can be revisited
+ LOGW("WARNING : minUndequeCount(%d) != hardcoded value(%d)",
+ minUndequeCount, MIN_UNDEQUEUED_BUFFERS);
+ }
+ }
+
+ LOGD("minCaptureBuffers = %d zslQBuffers = %d minCircularBufNum = %d"
+ "maxStreamBuf = %d minUndequeCount = %d",
+ minCaptureBuffers, zslQBuffers, minCircularBufNum,
+ maxStreamBuf, minUndequeCount);
+ // Get buffer count for the particular stream type
+ switch (stream_type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ {
+ if (mParameters.isZSLMode()) {
+ // We need to add two extra streaming buffers to add
+ // flexibility in forming matched super buf in ZSL queue.
+ // with number being 'zslQBuffers + minCircularBufNum'
+ // we see preview buffers sometimes get dropped at CPP
+ // and super buf is not forming in ZSL Q for long time.
+
+ bufferCnt = zslQBuffers + minCircularBufNum +
+ mParameters.getNumOfExtraBuffersForImageProc() +
+ mParameters.getNumOfExtraBuffersForPreview() +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded();
+ } else {
+ bufferCnt = CAMERA_MIN_STREAMING_BUFFERS +
+ mParameters.getMaxUnmatchedFramesInQueue() +
+ mParameters.getNumOfExtraBuffersForPreview();
+ }
+ // ISP allocates native preview buffers, so reduce the HAL allocation by the same count
+ if (bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS )
+ bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+
+ if (mParameters.getRecordingHintValue() == true)
+ bufferCnt += EXTRA_ZSL_PREVIEW_STREAM_BUF;
+
+ // Add the display minUndequeCount count on top of camera requirement
+ bufferCnt += minUndequeCount;
+
+ property_get("persist.camera.preview_yuv", value, "0");
+ persist_cnt = atoi(value);
+ if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+ && (bufferCnt < persist_cnt)) {
+ bufferCnt = persist_cnt;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ {
+ bufferCnt = minCaptureBuffers +
+ mParameters.getMaxUnmatchedFramesInQueue() +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getNumOfExtraBuffersForImageProc();
+
+ if (bufferCnt > maxStreamBuf) {
+ bufferCnt = maxStreamBuf;
+ }
+ bufferCnt += minUndequeCount;
+ }
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ {
+ if (mParameters.isZSLMode() || mLongshotEnabled) {
+ if ((minCaptureBuffers == 1 || mParameters.isUbiRefocus()) &&
+ !mLongshotEnabled) {
+ // Single ZSL snapshot case
+ bufferCnt = zslQBuffers + CAMERA_MIN_STREAMING_BUFFERS +
+ mParameters.getNumOfExtraBuffersForImageProc();
+ }
+ else {
+ // ZSL Burst or Longshot case
+ bufferCnt = zslQBuffers + minCircularBufNum +
+ mParameters.getNumOfExtraBuffersForImageProc();
+ }
+ if (getSensorType() == CAM_SENSOR_YUV && bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS) {
+ //ISP allocates native buffers in YUV case
+ bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+ }
+ } else {
+ bufferCnt = minCaptureBuffers +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getNumOfExtraBuffersForImageProc();
+
+ if (bufferCnt > maxStreamBuf) {
+ bufferCnt = maxStreamBuf;
+ }
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+
+ if (isRdiMode() || raw_yuv) {
+ bufferCnt = zslQBuffers + minCircularBufNum;
+ } else if (mParameters.isZSLMode()) {
+ bufferCnt = zslQBuffers + minCircularBufNum;
+ if (getSensorType() == CAM_SENSOR_YUV && bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS) {
+ //ISP allocates native buffers in YUV case
+ bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+ }
+
+ } else {
+ bufferCnt = minCaptureBuffers +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getNumOfExtraBuffersForImageProc();
+
+ if (bufferCnt > maxStreamBuf) {
+ bufferCnt = maxStreamBuf;
+ }
+ }
+
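+ // Debug overrides: persist.camera.preview_raw and persist.camera.video_raw
+ // can raise the RAW buffer count (typically used when dumping raw frames),
+ // bounded by CAM_MAX_NUM_BUFS_PER_STREAM.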
+ property_get("persist.camera.preview_raw", value, "0");
+ persist_cnt = atoi(value);
+ if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+ && (bufferCnt < persist_cnt)) {
+ bufferCnt = persist_cnt;
+ }
+ property_get("persist.camera.video_raw", value, "0");
+ persist_cnt = atoi(value);
+ if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+ && (bufferCnt < persist_cnt)) {
+ bufferCnt = persist_cnt;
+ }
+
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ {
+ if (mParameters.getBufBatchCount()) {
+ //Video Buffer in case of HFR or camera batching..
+ bufferCnt = CAMERA_MIN_CAMERA_BATCH_BUFFERS;
+ } else if (mParameters.getVideoBatchSize()) {
+ //Video Buffer count only for HAL to HAL batching.
+ bufferCnt = (CAMERA_MIN_VIDEO_BATCH_BUFFERS
+ * mParameters.getVideoBatchSize());
+ if (bufferCnt < CAMERA_MIN_VIDEO_BUFFERS) {
+ bufferCnt = CAMERA_MIN_VIDEO_BUFFERS;
+ }
+ } else {
+ // No batching enabled.
+ bufferCnt = CAMERA_MIN_VIDEO_BUFFERS;
+ }
+
+ bufferCnt += mParameters.getNumOfExtraBuffersForVideo();
+ // if it is a 4K encoding use case, add extra buffers
+ cam_dimension_t dim;
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_VIDEO, dim);
+ if (is4k2kResolution(&dim)) {
+ //get additional buffer count
+ property_get("vidc.enc.dcvs.extra-buff-count", value, "0");
+ bufferCnt += atoi(value);
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ {
+ if (mParameters.isZSLMode()) {
+ // MetaData buffers should be >= (Preview buffers-minUndequeCount)
+ bufferCnt = zslQBuffers + minCircularBufNum +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getNumOfExtraBuffersForImageProc() +
+ EXTRA_ZSL_PREVIEW_STREAM_BUF;
+ } else {
+ bufferCnt = minCaptureBuffers +
+ mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+ mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+ mParameters.getMaxUnmatchedFramesInQueue() +
+ CAMERA_MIN_STREAMING_BUFFERS +
+ mParameters.getNumOfExtraBuffersForImageProc();
+
+ if (bufferCnt > zslQBuffers + minCircularBufNum) {
+ bufferCnt = zslQBuffers + minCircularBufNum;
+ }
+ }
+ if (CAMERA_MIN_METADATA_BUFFERS > bufferCnt) {
+ bufferCnt = CAMERA_MIN_METADATA_BUFFERS;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ {
+ bufferCnt = minCaptureBuffers;
+ // One of the ubifocus buffers is a miscellaneous buffer
+ if (mParameters.isUbiRefocus()) {
+ bufferCnt -= 1;
+ }
+ if (mLongshotEnabled) {
+ bufferCnt = mParameters.getLongshotStages();
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_CALLBACK:
+ bufferCnt = CAMERA_MIN_CALLBACK_BUFFERS;
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ case CAM_STREAM_TYPE_DEFAULT:
+ case CAM_STREAM_TYPE_MAX:
+ default:
+ bufferCnt = 0;
+ break;
+ }
+
+ LOGH("Buffer count = %d for stream type = %d", bufferCnt, stream_type);
+ if (CAM_MAX_NUM_BUFS_PER_STREAM < bufferCnt) {
+ LOGW("Buffer count %d for stream type %d exceeds limit %d",
+ bufferCnt, stream_type, CAM_MAX_NUM_BUFS_PER_STREAM);
+ return CAM_MAX_NUM_BUFS_PER_STREAM;
+ }
+
+ return (uint8_t)bufferCnt;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateStreamBuf
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ * @stream_type : type of stream
+ * @size : size of buffer
+ * @stride : stride of buffer
+ * @scanline : scanline of buffer
+ * @bufferCnt : [IN/OUT] minimum num of buffers to be allocated.
+ * could be modified during allocation if more buffers needed
+ *
+ * RETURN : ptr to a memory obj that holds stream buffers.
+ * NULL if failed
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamBuf(
+ cam_stream_type_t stream_type, size_t size, int stride, int scanline,
+ uint8_t &bufferCnt)
+{
+ int rc = NO_ERROR;
+ QCameraMemory *mem = NULL;
+ bool bCachedMem = QCAMERA_ION_USE_CACHE;
+ bool bPoolMem = false;
+ char value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.mem.usepool", value, "1");
+ if (atoi(value) == 1) {
+ bPoolMem = true;
+ }
+
+ // Allocate stream buffer memory object
+ switch (stream_type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ {
+ if (isNoDisplayMode()) {
+ mem = new QCameraStreamMemory(mGetMemory,
+ bCachedMem,
+ (bPoolMem) ? &m_memoryPool : NULL,
+ stream_type);
+ } else {
+ cam_dimension_t dim;
+ int minFPS, maxFPS;
+ QCameraGrallocMemory *grallocMemory =
+ new QCameraGrallocMemory(mGetMemory);
+
+ mParameters.getStreamDimension(stream_type, dim);
+ /* we are interested only in maxfps here */
+ mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+ int usage = 0;
+ if(mParameters.isUBWCEnabled()) {
+ cam_format_t fmt;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+ if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+ usage = GRALLOC_USAGE_PRIVATE_ALLOC_UBWC ;
+ }
+ }
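+ // Only CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS are made mappable up front;
+ // any remainder is tracked in mEnqueuedBuffers under mGrallocLock.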
+ if (grallocMemory) {
+ grallocMemory->setMappable(
+ CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS);
+ grallocMemory->setWindowInfo(mPreviewWindow,
+ dim.width,dim.height, stride, scanline,
+ mParameters.getPreviewHalPixelFormat(),
+ maxFPS, usage);
+ pthread_mutex_lock(&mGrallocLock);
+ if (bufferCnt > CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS) {
+ mEnqueuedBuffers = (bufferCnt -
+ CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS);
+ } else {
+ mEnqueuedBuffers = 0;
+ }
+ pthread_mutex_unlock(&mGrallocLock);
+ }
+ mem = grallocMemory;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ {
+ if (isNoDisplayMode() || isPreviewRestartEnabled()) {
+ mem = new QCameraStreamMemory(mGetMemory, bCachedMem);
+ } else {
+ cam_dimension_t dim;
+ int minFPS, maxFPS;
+ QCameraGrallocMemory *grallocMemory =
+ new QCameraGrallocMemory(mGetMemory);
+
+ mParameters.getStreamDimension(stream_type, dim);
+ /* we are interested only in maxfps here */
+ mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+ if (grallocMemory) {
+ grallocMemory->setWindowInfo(mPreviewWindow, dim.width,
+ dim.height, stride, scanline,
+ mParameters.getPreviewHalPixelFormat(), maxFPS);
+ }
+ mem = grallocMemory;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_RAW:
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ mem = new QCameraStreamMemory(mGetMemory,
+ bCachedMem,
+ (bPoolMem) ? &m_memoryPool : NULL,
+ stream_type);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ {
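+ // mMetadataMem, if present, holds metadata buffers pre-allocated by the
+ // deferred METADATA_ALLOC job queued at open time; reuse it here and only
+ // top up via allocateMore() when this stream needs additional buffers.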
+ if (mMetadataMem == NULL) {
+ mem = new QCameraMetadataStreamMemory(QCAMERA_ION_USE_CACHE);
+ } else {
+ mem = mMetadataMem;
+ mMetadataMem = NULL;
+
+ int32_t numAdditionalBuffers = bufferCnt - mem->getCnt();
+ if (numAdditionalBuffers > 0) {
+ rc = mem->allocateMore(numAdditionalBuffers, size);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to allocate additional buffers, "
+ "but attempting to proceed.");
+ }
+ }
+ bufferCnt = mem->getCnt();
+ // The memory is already allocated and initialized, so
+ // simply return here.
+ return mem;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ {
+ //Use uncached allocation by default
+ if (mParameters.isVideoBuffersCached() || mParameters.isSeeMoreEnabled() ||
+ mParameters.isHighQualityNoiseReductionMode()) {
+ bCachedMem = QCAMERA_ION_USE_CACHE;
+ }
+ else {
+ bCachedMem = QCAMERA_ION_USE_NOCACHE;
+ }
+
+ QCameraVideoMemory *videoMemory = NULL;
+ if (mParameters.getVideoBatchSize()) {
+ videoMemory = new QCameraVideoMemory(
+ mGetMemory, FALSE, QCAMERA_MEM_TYPE_BATCH);
+ if (videoMemory == NULL) {
+ LOGE("Out of memory for video batching obj");
+ return NULL;
+ }
+ /*
+ * numFDs = BATCH size
+ * numInts = 5 // OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+ */
+ rc = videoMemory->allocateMeta(
+ CAMERA_MIN_VIDEO_BATCH_BUFFERS,
+ mParameters.getVideoBatchSize(),
+ VIDEO_METADATA_NUM_INTS);
+ if (rc < 0) {
+ delete videoMemory;
+ return NULL;
+ }
+ } else {
+ videoMemory =
+ new QCameraVideoMemory(mGetMemory, bCachedMem);
+ if (videoMemory == NULL) {
+ LOGE("Out of memory for video obj");
+ return NULL;
+ }
+ }
+
+ int usage = 0;
+ cam_format_t fmt;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_VIDEO,fmt);
+ if (mParameters.isUBWCEnabled() && (fmt == CAM_FORMAT_YUV_420_NV12_UBWC)) {
+ usage = private_handle_t::PRIV_FLAGS_UBWC_ALIGNED;
+ }
+ videoMemory->setVideoInfo(usage, fmt);
+ mem = videoMemory;
+ mVideoMem = videoMemory;
+ }
+ break;
+ case CAM_STREAM_TYPE_CALLBACK:
+ mem = new QCameraStreamMemory(mGetMemory,
+ bCachedMem,
+ (bPoolMem) ? &m_memoryPool : NULL,
+ stream_type);
+ break;
+ case CAM_STREAM_TYPE_DEFAULT:
+ case CAM_STREAM_TYPE_MAX:
+ default:
+ break;
+ }
+ if (!mem) {
+ return NULL;
+ }
+
+ if (bufferCnt > 0) {
+ if (mParameters.isSecureMode() &&
+ (stream_type == CAM_STREAM_TYPE_RAW) &&
+ (mParameters.isRdiMode())) {
+ LOGD("Allocating %d secure buffers of size %d ", bufferCnt, size);
+ rc = mem->allocate(bufferCnt, size, SECURE);
+ } else {
+ rc = mem->allocate(bufferCnt, size, NON_SECURE);
+ }
+ if (rc < 0) {
+ delete mem;
+ return NULL;
+ }
+ bufferCnt = mem->getCnt();
+ }
+ LOGH("rc = %d type = %d count = %d size = %d cache = %d, pool = %d",
+ rc, stream_type, bufferCnt, size, bCachedMem, bPoolMem);
+ return mem;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMoreStreamBuf
+ *
+ * DESCRIPTION: allocate more stream buffers from the memory object
+ *
+ * PARAMETERS :
+ * @mem_obj : memory object ptr
+ * @size : size of buffer
+ * @bufferCnt : [IN/OUT] additional number of buffers to be allocated.
+ * output will be the number of total buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::allocateMoreStreamBuf(
+ QCameraMemory *mem_obj, size_t size, uint8_t &bufferCnt)
+{
+ int rc = NO_ERROR;
+
+ if (bufferCnt > 0) {
+ rc = mem_obj->allocateMore(bufferCnt, size);
+ bufferCnt = mem_obj->getCnt();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMiscBuf
+ *
+ * DESCRIPTION: allocate miscellaneous buffer
+ *
+ * PARAMETERS :
+ * @streamInfo : stream info
+ *
+ * RETURN : ptr to a memory obj that holds the miscellaneous buffer.
+ * NULL if failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateMiscBuf(
+ cam_stream_info_t *streamInfo)
+{
+ int rc = NO_ERROR;
+ uint8_t bufNum = 0;
+ size_t bufSize = 0;
+ QCameraHeapMemory *miscBuf = NULL;
+ cam_feature_mask_t feature_mask =
+ streamInfo->reprocess_config.pp_feature_config.feature_mask;
+
+ switch (streamInfo->stream_type) {
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ if (CAM_QCOM_FEATURE_TRUEPORTRAIT & feature_mask) {
+ bufNum = 1;
+ bufSize = mParameters.getTPMaxMetaSize();
+ } else if (CAM_QCOM_FEATURE_REFOCUS & feature_mask) {
+ bufNum = 1;
+ bufSize = mParameters.getRefocusMaxMetaSize();
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (bufNum && bufSize) {
+ miscBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+
+ if (!miscBuf) {
+ LOGE("Unable to allocate miscBuf object");
+ return NULL;
+ }
+
+ rc = miscBuf->allocate(bufNum, bufSize, NON_SECURE);
+ if (rc < 0) {
+ LOGE("Failed to allocate misc buffer memory");
+ delete miscBuf;
+ return NULL;
+ }
+ }
+
+ return miscBuf;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateStreamInfoBuf
+ *
+ * DESCRIPTION: allocate stream info buffer
+ *
+ * PARAMETERS :
+ * @stream_type : type of stream
+ *
+ * RETURN : ptr to a memory obj that holds stream info buffer.
+ * NULL if failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateStreamInfoBuf(
+ cam_stream_type_t stream_type)
+{
+ int rc = NO_ERROR;
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+
+ QCameraHeapMemory *streamInfoBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+ if (!streamInfoBuf) {
+ LOGE("allocateStreamInfoBuf: Unable to allocate streamInfo object");
+ return NULL;
+ }
+
+ rc = streamInfoBuf->allocate(1, sizeof(cam_stream_info_t), NON_SECURE);
+ if (rc < 0) {
+ LOGE("allocateStreamInfoBuf: Failed to allocate stream info memory");
+ delete streamInfoBuf;
+ return NULL;
+ }
+
+ cam_stream_info_t *streamInfo = (cam_stream_info_t *)streamInfoBuf->getPtr(0);
+ memset(streamInfo, 0, sizeof(cam_stream_info_t));
+ streamInfo->stream_type = stream_type;
+ rc = mParameters.getStreamFormat(stream_type, streamInfo->fmt);
+ rc = mParameters.getStreamDimension(stream_type, streamInfo->dim);
+ rc = mParameters.getStreamRotation(stream_type, streamInfo->pp_config, streamInfo->dim);
+ streamInfo->num_bufs = getBufNumRequired(stream_type);
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ streamInfo->is_secure = NON_SECURE;
+
+ switch (stream_type) {
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ if ((mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) ||
+ mLongshotEnabled) {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+ streamInfo->num_of_burst = (uint8_t)
+ (mParameters.getNumOfSnapshots()
+ + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+ - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+ + mParameters.getNumOfExtraBuffersForImageProc());
+ }
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+ if ((mParameters.isZSLMode()) || (isRdiMode()) || (raw_yuv)) {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+ streamInfo->num_of_burst = mParameters.getNumOfSnapshots();
+ }
+ if (mParameters.isSecureMode() && mParameters.isRdiMode()) {
+ streamInfo->is_secure = SECURE;
+ } else {
+ streamInfo->is_secure = NON_SECURE;
+ }
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ if (mLongshotEnabled) {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+ streamInfo->num_of_burst = (uint8_t)(mParameters.getNumOfSnapshots()
+ + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+ - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+ + mParameters.getNumOfExtraBuffersForImageProc());
+ }
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ streamInfo->dis_enable = mParameters.isDISEnabled();
+ if (mParameters.getBufBatchCount()) {
+ //Update stream info structure with batch mode info
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_BATCH;
+ streamInfo->user_buf_info.frame_buf_cnt = mParameters.getBufBatchCount();
+ streamInfo->user_buf_info.size =
+ (uint32_t)(sizeof(struct msm_camera_user_buf_cont_t));
+ cam_fps_range_t pFpsRange;
+ mParameters.getHfrFps(pFpsRange);
+ streamInfo->user_buf_info.frameInterval =
+ (long)((1000/pFpsRange.video_max_fps) * 1000);
+ LOGH("Video Batch Count = %d, interval = %d",
+ streamInfo->user_buf_info.frame_buf_cnt,
+ streamInfo->user_buf_info.frameInterval);
+ }
+ if (mParameters.getRecordingHintValue()) {
+ if(mParameters.isDISEnabled()) {
+ streamInfo->is_type = mParameters.getISType();
+ } else {
+ streamInfo->is_type = IS_TYPE_NONE;
+ }
+ }
+ if (mParameters.isSecureMode()) {
+ streamInfo->is_secure = SECURE;
+ }
+ break;
+ case CAM_STREAM_TYPE_PREVIEW:
+ if (mParameters.getRecordingHintValue()) {
+ if(mParameters.isDISEnabled()) {
+ streamInfo->is_type = mParameters.getPreviewISType();
+ } else {
+ streamInfo->is_type = IS_TYPE_NONE;
+ }
+ }
+ if (mParameters.isSecureMode()) {
+ streamInfo->is_secure = SECURE;
+ }
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ streamInfo->noFrameExpected = 1;
+ break;
+ default:
+ break;
+ }
+
+ // Update feature mask
+ mParameters.updatePpFeatureMask(stream_type);
+
+ // Get feature mask
+ mParameters.getStreamPpMask(stream_type, streamInfo->pp_config.feature_mask);
+
+ // Update pp config
+ if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_FLIP) {
+ int flipMode = mParameters.getFlipMode(stream_type);
+ if (flipMode > 0) {
+ streamInfo->pp_config.flip = (uint32_t)flipMode;
+ }
+ }
+ if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_SHARPNESS) {
+ streamInfo->pp_config.sharpness = mParameters.getSharpness();
+ }
+ if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_EFFECT) {
+ streamInfo->pp_config.effect = mParameters.getEffectValue();
+ }
+
+ if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_DENOISE2D) {
+ streamInfo->pp_config.denoise2d.denoise_enable = 1;
+ streamInfo->pp_config.denoise2d.process_plates =
+ mParameters.getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+ }
+
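+ // Unless this is a snapshot/RAW stream that will be reprocessed anyway,
+ // also advertise CROP and SCALE in the stream's feature mask when the
+ // platform capabilities support them.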
+ if (!((needReprocess()) && (CAM_STREAM_TYPE_SNAPSHOT == stream_type ||
+ CAM_STREAM_TYPE_RAW == stream_type))) {
+ if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_CROP)
+ streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+ if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_SCALE)
+ streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+ }
+
+ LOGH("type %d, fmt %d, dim %dx%d, num_bufs %d mask = 0x%x\n",
+ stream_type, streamInfo->fmt, streamInfo->dim.width,
+ streamInfo->dim.height, streamInfo->num_bufs,
+ streamInfo->pp_config.feature_mask);
+
+ return streamInfoBuf;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateStreamUserBuf
+ *
+ * DESCRIPTION: allocate user ptr for stream buffers
+ *
+ * PARAMETERS :
+ * @streamInfo : stream info structure
+ *
+ * RETURN : ptr to a memory obj that holds the stream user buffers.
+ * NULL if failed
+ *
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamUserBuf(
+ cam_stream_info_t *streamInfo)
+{
+ int rc = NO_ERROR;
+ QCameraMemory *mem = NULL;
+ int size = 0;
+
+ if (streamInfo->streaming_mode != CAM_STREAMING_MODE_BATCH) {
+ LOGE("Stream is not in BATCH mode. Invalid Stream");
+ return NULL;
+ }
+
+ // Allocate stream user buffer memory object
+ switch (streamInfo->stream_type) {
+ case CAM_STREAM_TYPE_VIDEO: {
+ QCameraVideoMemory *video_mem = new QCameraVideoMemory(
+ mGetMemory, FALSE, QCAMERA_MEM_TYPE_BATCH);
+ if (video_mem == NULL) {
+ LOGE("Out of memory for video obj");
+ return NULL;
+ }
+ /*
+ * numFDs = BATCH size
+ * numInts = 5 // OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+ */
+ rc = video_mem->allocateMeta(streamInfo->num_bufs,
+ mParameters.getBufBatchCount(), VIDEO_METADATA_NUM_INTS);
+ if (rc < 0) {
+ LOGE("allocateMeta failed");
+ delete video_mem;
+ return NULL;
+ }
+ int usage = 0;
+ cam_format_t fmt;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_VIDEO, fmt);
+ if(mParameters.isUBWCEnabled() && (fmt == CAM_FORMAT_YUV_420_NV12_UBWC)) {
+ usage = private_handle_t::PRIV_FLAGS_UBWC_ALIGNED;
+ }
+ video_mem->setVideoInfo(usage, fmt);
+ mem = static_cast<QCameraMemory *>(video_mem);
+ mVideoMem = video_mem;
+ }
+ break;
+
+ case CAM_STREAM_TYPE_PREVIEW:
+ case CAM_STREAM_TYPE_POSTVIEW:
+ case CAM_STREAM_TYPE_ANALYSIS:
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_RAW:
+ case CAM_STREAM_TYPE_METADATA:
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ case CAM_STREAM_TYPE_CALLBACK:
+ LOGE("Stream type Not supported.for BATCH processing");
+ break;
+
+ case CAM_STREAM_TYPE_DEFAULT:
+ case CAM_STREAM_TYPE_MAX:
+ default:
+ break;
+ }
+ if (!mem) {
+ LOGE("Failed to allocate mem");
+ return NULL;
+ }
+
+ /* One allocation sized to hold all batch containers for this stream */
+ size = PAD_TO_SIZE((streamInfo->num_bufs * streamInfo->user_buf_info.size),
+ CAM_PAD_TO_4K);
+
+ LOGH("Allocating BATCH Buffer count = %d", streamInfo->num_bufs);
+
+ if (size > 0) {
+ // Allocating one buffer for all batch buffers
+ rc = mem->allocate(1, size, NON_SECURE);
+ if (rc < 0) {
+ delete mem;
+ return NULL;
+ }
+ }
+ return mem;
+}
+
+
+/*===========================================================================
+ * FUNCTION : waitForDeferredAlloc
+ *
+ * DESCRIPTION: Wait for deferred allocation, if applicable
+ * (applicable only for metadata buffers so far)
+ *
+ * PARAMETERS :
+ * @stream_type : type of stream to (possibly) wait for
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitForDeferredAlloc(cam_stream_type_t stream_type)
+{
+ if (stream_type == CAM_STREAM_TYPE_METADATA) {
+ waitDeferredWork(mMetadataAllocJob);
+ }
+}
+
+
+/*===========================================================================
+ * FUNCTION : setPreviewWindow
+ *
+ * DESCRIPTION: set preview window impl
+ *
+ * PARAMETERS :
+ * @window : ptr to window ops table struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setPreviewWindow(
+ struct preview_stream_ops *window)
+{
+ mPreviewWindow = window;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setCallBacks
+ *
+ * DESCRIPTION: set callbacks impl
+ *
+ * PARAMETERS :
+ * @notify_cb : notify cb
+ * @data_cb : data cb
+ * @data_cb_timestamp : data cb with time stamp
+ * @get_memory : request memory ops table
+ * @user : user data ptr
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mGetMemory = get_memory;
+ mCallbackCookie = user;
+ m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegCallBacks
+ *
+ * DESCRIPTION: set JPEG callbacks impl
+ *
+ * PARAMETERS :
+ * @jpegCb : Jpeg callback method
+ * @callbackCookie : callback cookie
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::setJpegCallBacks(jpeg_data_callback jpegCb,
+ void *callbackCookie)
+{
+ LOGH("camera id %d", getCameraId());
+ mJpegCb = jpegCb;
+ mJpegCallbackCookie = callbackCookie;
+ m_cbNotifier.setJpegCallBacks(mJpegCb, mJpegCallbackCookie);
+}
+
+/*===========================================================================
+ * FUNCTION : enableMsgType
+ *
+ * DESCRIPTION: enable msg type impl
+ *
+ * PARAMETERS :
+ * @msg_type : msg type mask to be enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::enableMsgType(int32_t msg_type)
+{
+ int32_t rc = NO_ERROR;
+
+ if (mParameters.isUBWCEnabled()) {
+ /* Need a special CALLBACK stream in case the application requests
+ preview callbacks while UBWC is enabled */
+ if (!(msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) &&
+ (msg_type & CAMERA_MSG_PREVIEW_FRAME)) {
+ // Start callback channel only when preview/zsl channel is active
+ QCameraChannel* previewCh = NULL;
+ if (isZSLMode() && (getRecordingHintValue() != true)) {
+ previewCh = m_channels[QCAMERA_CH_TYPE_ZSL];
+ } else {
+ previewCh = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ }
+ QCameraChannel* callbackCh = m_channels[QCAMERA_CH_TYPE_CALLBACK];
+ if ((callbackCh != NULL) &&
+ (previewCh != NULL) && previewCh->isActive()) {
+ rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
+ if (rc != NO_ERROR) {
+ LOGE("START Callback Channel failed");
+ }
+ }
+ }
+ }
+ mMsgEnabled |= msg_type;
+ LOGH("(0x%x) : mMsgEnabled = 0x%x rc = %d", msg_type , mMsgEnabled, rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : disableMsgType
+ *
+ * DESCRIPTION: disable msg type impl
+ *
+ * PARAMETERS :
+ * @msg_type : msg type mask to be disabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::disableMsgType(int32_t msg_type)
+{
+ int32_t rc = NO_ERROR;
+
+ if (mParameters.isUBWCEnabled()) {
+ /*STOP CALLBACK STREAM*/
+ if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) &&
+ (msg_type & CAMERA_MSG_PREVIEW_FRAME)) {
+ // Stop callback channel only if it is active
+ if ((m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL) &&
+ (m_channels[QCAMERA_CH_TYPE_CALLBACK]->isActive())) {
+ rc = stopChannel(QCAMERA_CH_TYPE_CALLBACK);
+ if (rc != NO_ERROR) {
+ LOGE("STOP Callback Channel failed");
+ }
+ }
+ }
+ }
+ mMsgEnabled &= ~msg_type;
+ LOGH("(0x%x) : mMsgEnabled = 0x%x rc = %d", msg_type , mMsgEnabled, rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : msgTypeEnabled
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled
+ *
+ * PARAMETERS :
+ * @msg_type : msg type mask
+ *
+ * RETURN : 0 -- not enabled
+ * non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabled(int32_t msg_type)
+{
+ return (mMsgEnabled & msg_type);
+}
+
+/*===========================================================================
+ * FUNCTION : msgTypeEnabledWithLock
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled with lock
+ *
+ * PARAMETERS :
+ * @msg_type : msg type mask
+ *
+ * RETURN : 0 -- not enabled
+ * non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabledWithLock(int32_t msg_type)
+{
+ int enabled = 0;
+ lockAPI();
+ enabled = mMsgEnabled & msg_type;
+ unlockAPI();
+ return enabled;
+}
+
+/*===========================================================================
+ * FUNCTION : startPreview
+ *
+ * DESCRIPTION: start preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startPreview()
+{
+ KPI_ATRACE_CALL();
+ int32_t rc = NO_ERROR;
+
+ LOGI("E ZSL = %d Recording Hint = %d", mParameters.isZSLMode(),
+ mParameters.getRecordingHintValue());
+
+ m_perfLock.lock_acq();
+
+ updateThermalLevel((void *)&mThermalLevel);
+
+ setDisplayFrameSkip();
+
+ // start preview stream
+ if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+ rc = startChannel(QCAMERA_CH_TYPE_ZSL);
+ } else {
+ rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+ }
+
+ if (rc != NO_ERROR) {
+ LOGE("failed to start channels");
+ m_perfLock.lock_rel();
+ return rc;
+ }
+
+ if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME))
+ && (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
+ rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
+ if (rc != NO_ERROR) {
+ LOGE("failed to start callback stream");
+ stopChannel(QCAMERA_CH_TYPE_ZSL);
+ stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+ m_perfLock.lock_rel();
+ return rc;
+ }
+ }
+
+ updatePostPreviewParameters();
+ m_stateMachine.setPreviewCallbackNeeded(true);
+
+ // if job id is non-zero, that means the postproc init job is already
+ // pending or complete
+ if (mInitPProcJob == 0) {
+ mInitPProcJob = deferPPInit();
+ if (mInitPProcJob == 0) {
+ LOGE("Unable to initialize postprocessor, mCameraHandle = %p",
+ mCameraHandle);
+ rc = -ENOMEM;
+ m_perfLock.lock_rel();
+ return rc;
+ }
+ }
+ m_perfLock.lock_rel();
+
+ if (rc == NO_ERROR) {
+ // Set power Hint for preview
+ m_perfLock.powerHint(POWER_HINT_CAM_PREVIEW, true);
+ }
+
+ LOGI("X rc = %d", rc);
+ return rc;
+}
+
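+/*===========================================================================
+ * FUNCTION : updatePostPreviewParameters
+ *
+ * DESCRIPTION: apply parameter updates once preview has started
+ * (currently only enables OIS)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *==========================================================================*/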
+int32_t QCamera2HardwareInterface::updatePostPreviewParameters() {
+ // Enable OIS only in Camera mode and 4k2k camcorder mode
+ int32_t rc = NO_ERROR;
+ rc = mParameters.updateOisValue(1);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : stopPreview
+ *
+ * DESCRIPTION: stop preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopPreview()
+{
+ KPI_ATRACE_CALL();
+ LOGI("E");
+ mNumPreviewFaces = -1;
+ mActiveAF = false;
+
+ // Disable power Hint for preview
+ m_perfLock.powerHint(POWER_HINT_CAM_PREVIEW, false);
+
+ m_perfLock.lock_acq();
+
+ // stop preview stream
+ stopChannel(QCAMERA_CH_TYPE_CALLBACK);
+ stopChannel(QCAMERA_CH_TYPE_ZSL);
+ stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+ stopChannel(QCAMERA_CH_TYPE_RAW);
+
+ m_cbNotifier.flushPreviewNotifications();
+ //add for ts makeup
+#ifdef TARGET_TS_MAKEUP
+ ts_makeup_finish();
+#endif
+ // delete all channels from preparePreview
+ unpreparePreview();
+
+ m_perfLock.lock_rel();
+
+ LOGI("X");
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : storeMetaDataInBuffers
+ *
+ * DESCRIPTION: enable store meta data in buffers for video frames impl
+ *
+ * PARAMETERS :
+ * @enable : flag if need enable
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::storeMetaDataInBuffers(int enable)
+{
+ mStoreMetaDataInFrame = enable;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : preStartRecording
+ *
+ * DESCRIPTION: Prepare start recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::preStartRecording()
+{
+ int32_t rc = NO_ERROR;
+ LOGH("E");
+ if (mParameters.getRecordingHintValue() == false) {
+
+ // Give HWI control to restart preview only in single camera mode.
+ // In dual-cam mode, this control belongs to muxer.
+ if (getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ LOGH("start recording when hint is false, stop preview first");
+ stopPreview();
+
+ // Set recording hint to TRUE
+ mParameters.updateRecordingHintValue(TRUE);
+ rc = preparePreview();
+ if (rc == NO_ERROR) {
+ rc = startPreview();
+ }
+ }
+ else
+ {
+ // For dual cam mode, update the flag mPreviewRestartNeeded to true
+ // Restart control will be handled by muxer.
+ mPreviewRestartNeeded = true;
+ }
+ }
+
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : startRecording
+ *
+ * DESCRIPTION: start recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startRecording()
+{
+ int32_t rc = NO_ERROR;
+
+ LOGI("E");
+ mVideoMem = NULL;
+ // link meta stream with video channel in low power mode.
+ if (isLowPowerMode()) {
+ // Find and try to link a metadata stream from preview channel
+ QCameraChannel *pMetaChannel = NULL;
+ QCameraStream *pMetaStream = NULL;
+ QCameraChannel *pVideoChannel = m_channels[QCAMERA_CH_TYPE_VIDEO];
+
+ if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+ pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ uint32_t streamNum = pMetaChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+ pStream = pMetaChannel->getStreamByIndex(i);
+ if ((NULL != pStream) &&
+ (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
+ pMetaStream = pStream;
+ break;
+ }
+ }
+ }
+
+ if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+ rc = pVideoChannel->linkStream(pMetaChannel, pMetaStream);
+ if (NO_ERROR != rc) {
+ LOGW("Metadata stream link failed %d", rc);
+ }
+ }
+ }
+
+ if (rc == NO_ERROR) {
+ rc = startChannel(QCAMERA_CH_TYPE_VIDEO);
+ }
+
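+ // With TNR snapshot enabled (and not in low power mode), start the
+ // snapshot channel alongside video for TNR processing; for non-4K
+ // resolutions the preview channel's metadata stream is linked in first so
+ // the snapshot channel receives per-frame metadata.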
+ if (mParameters.isTNRSnapshotEnabled() && !isLowPowerMode()) {
+ QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+ if (!mParameters.is4k2kVideoResolution()) {
+ // Find and try to link a metadata stream from preview channel
+ QCameraChannel *pMetaChannel = NULL;
+ QCameraStream *pMetaStream = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+ pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ uint32_t streamNum = pMetaChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+ pStream = pMetaChannel->getStreamByIndex(i);
+ if ((NULL != pStream) &&
+ (CAM_STREAM_TYPE_METADATA ==
+ pStream->getMyType())) {
+ pMetaStream = pStream;
+ break;
+ }
+ }
+ }
+
+ if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+ rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+ if (NO_ERROR != rc) {
+ LOGW("Metadata stream link failed %d", rc);
+ }
+ }
+ }
+ LOGH("START snapshot Channel for TNR processing");
+ rc = pChannel->start();
+ }
+
+ if (rc == NO_ERROR) {
+ // Set power Hint for video encoding
+ m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
+ }
+
+ LOGI("X rc = %d", rc);
+ return rc;
+}
+
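+/* The metadata-stream lookup in startRecording() above (used for both the
+ * low-power video link and the TNR snapshot link) appears again in
+ * takeLiveSnapshot_internal(); a hypothetical helper (not part of this HAL)
+ * could factor it out, e.g.:
+ *
+ *   static QCameraStream *findStreamOfType(QCameraChannel *ch,
+ *           cam_stream_type_t type) {
+ *       for (uint32_t i = 0; (ch != NULL) && (i < ch->getNumOfStreams()); i++) {
+ *           QCameraStream *s = ch->getStreamByIndex(i);
+ *           if ((NULL != s) && (type == s->getMyType()))
+ *               return s;
+ *       }
+ *       return NULL;
+ *   }
+ */
+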
+/*===========================================================================
+ * FUNCTION : stopRecording
+ *
+ * DESCRIPTION: stop recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopRecording()
+{
+ LOGI("E");
+ // stop snapshot channel
+ if (mParameters.isTNRSnapshotEnabled()) {
+ LOGH("STOP snapshot Channel for TNR processing");
+ stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ }
+ int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
+
+ m_cbNotifier.flushVideoNotifications();
+ // Disable power hint for video encoding
+ m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+ mVideoMem = NULL;
+ LOGI("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseRecordingFrame
+ *
+ * DESCRIPTION: return video frame impl
+ *
+ * PARAMETERS :
+ * @opaque : ptr to video frame to be returned
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::releaseRecordingFrame(const void * opaque)
+{
+ int32_t rc = UNKNOWN_ERROR;
+ QCameraVideoChannel *pChannel =
+ (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
+ LOGD("opaque data = %p",opaque);
+
+ if(pChannel != NULL) {
+ rc = pChannel->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : autoFocus
+ *
+ * DESCRIPTION: start auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::autoFocus()
+{
+ int rc = NO_ERROR;
+ cam_focus_mode_type focusMode = mParameters.getFocusMode();
+ LOGH("E");
+
+ switch (focusMode) {
+ case CAM_FOCUS_MODE_AUTO:
+ case CAM_FOCUS_MODE_MACRO:
+ case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+ case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+ mActiveAF = true;
+ LOGI("Send AUTO FOCUS event. focusMode=%d, m_currentFocusState=%d",
+ focusMode, m_currentFocusState);
+ rc = mCameraHandle->ops->do_auto_focus(mCameraHandle->camera_handle);
+ break;
+ case CAM_FOCUS_MODE_INFINITY:
+ case CAM_FOCUS_MODE_FIXED:
+ case CAM_FOCUS_MODE_EDOF:
+ default:
+ LOGI("No ops in focusMode (%d)", focusMode);
+ rc = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
+ break;
+ }
+
+ if (NO_ERROR != rc) {
+ mActiveAF = false;
+ }
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelAutoFocus
+ *
+ * DESCRIPTION: cancel auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelAutoFocus()
+{
+ int rc = NO_ERROR;
+ cam_focus_mode_type focusMode = mParameters.getFocusMode();
+
+ switch (focusMode) {
+ case CAM_FOCUS_MODE_AUTO:
+ case CAM_FOCUS_MODE_MACRO:
+ case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+ case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+ mActiveAF = false;
+ rc = mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
+ break;
+ case CAM_FOCUS_MODE_INFINITY:
+ case CAM_FOCUS_MODE_FIXED:
+ case CAM_FOCUS_MODE_EDOF:
+ default:
+ LOGD("No ops in focusMode (%d)", focusMode);
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processUFDumps
+ *
+ * DESCRIPTION: process UF jpeg dumps for refocus support
+ *
+ * PARAMETERS :
+ * @evt : payload of jpeg event, including information about jpeg encoding
+ * status, jpeg size and so on.
+ *
+ * RETURN : bool
+ * true -- all refocus output images handled (or refocus not enabled)
+ * false -- more refocus output images still pending
+ *
+ * NOTE : none
+ *==========================================================================*/
+bool QCamera2HardwareInterface::processUFDumps(qcamera_jpeg_evt_payload_t *evt)
+{
+ bool ret = true;
+ if (mParameters.isUbiRefocus()) {
+ int index = (int)getOutputImageCount();
+ bool allFocusImage = (index == ((int)mParameters.getRefocusOutputCount() - 1));
+ char name[FILENAME_MAX];
+
+ camera_memory_t *jpeg_mem = NULL;
+ omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+ size_t dataLen;
+ uint8_t *dataPtr;
+ if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+ LOGE("Init PProc Deferred work failed");
+ return false;
+ }
+ if (!m_postprocessor.getJpegMemOpt()) {
+ dataLen = evt->out_data.buf_filled_len;
+ dataPtr = evt->out_data.buf_vaddr;
+ } else {
+ jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+ if (!jpeg_out) {
+ LOGE("Null pointer detected");
+ return false;
+ }
+ jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+ if (!jpeg_mem) {
+ LOGE("Null pointer detected");
+ return false;
+ }
+ dataPtr = (uint8_t *)jpeg_mem->data;
+ dataLen = jpeg_mem->size;
+ }
+
+ if (allFocusImage) {
+ snprintf(name, sizeof(name), "AllFocusImage");
+ index = -1;
+ } else {
+ snprintf(name, sizeof(name), "%d", 0);
+ }
+ CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"ubifocus", name, index, "jpg",
+ dataPtr, dataLen);
+ LOGD("Dump the image %d %d allFocusImage %d",
+ getOutputImageCount(), index, allFocusImage);
+ setOutputImageCount(getOutputImageCount() + 1);
+ if (!allFocusImage) {
+ ret = false;
+ }
+ }
+ return ret;
+}
+
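+/* Illustration (hypothetical count): with getRefocusOutputCount() == 6, the
+ * first five JPEGs are dumped under the name "0" with indices 0..4 and
+ * processUFDumps() returns false; the final JPEG is dumped as "AllFocusImage"
+ * with index -1 and the function returns true. */
+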
+/*===========================================================================
+ * FUNCTION : unconfigureAdvancedCapture
+ *
+ * DESCRIPTION: unconfigure Advanced Capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::unconfigureAdvancedCapture()
+{
+ int32_t rc = NO_ERROR;
+
+ /*Disable Quadra CFA mode*/
+ LOGH("Disabling Quadra CFA mode");
+ mParameters.setQuadraCfaMode(false, true);
+
+ if (mAdvancedCaptureConfigured) {
+
+ mAdvancedCaptureConfigured = false;
+
+ if(mIs3ALocked) {
+ mParameters.set3ALock(false);
+ mIs3ALocked = false;
+ }
+ if (mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
+ rc = mParameters.setToneMapMode(true, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to enable tone map during HDR/AEBracketing");
+ }
+ mHDRBracketingEnabled = false;
+ rc = mParameters.stopAEBracket();
+ } else if ((mParameters.isChromaFlashEnabled())
+ || (mFlashNeeded && !mLongshotEnabled)
+ || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+ || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+ rc = mParameters.resetFrameCapture(TRUE);
+ } else if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+ rc = configureAFBracketing(false);
+ } else if (mParameters.isOptiZoomEnabled()) {
+ rc = mParameters.setAndCommitZoom(mZoomLevel);
+ setDisplaySkip(FALSE, CAMERA_MAX_PARAM_APPLY_DELAY);
+ } else if (mParameters.isStillMoreEnabled()) {
+ cam_still_more_t stillmore_config = mParameters.getStillMoreSettings();
+ stillmore_config.burst_count = 0;
+ mParameters.setStillMoreSettings(stillmore_config);
+
+ /* If SeeMore is running, it will handle re-enabling tone map */
+ if (!mParameters.isSeeMoreEnabled() && !mParameters.isLTMForSeeMoreEnabled()) {
+ rc = mParameters.setToneMapMode(true, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to enable tone map during StillMore");
+ }
+ }
+
+ /* Re-enable Tintless */
+ mParameters.setTintless(true);
+ } else {
+ LOGW("No Advanced Capture feature enabled!!");
+ rc = BAD_VALUE;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureAdvancedCapture
+ *
+ * DESCRIPTION: configure Advanced Capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAdvancedCapture()
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+
+ rc = mParameters.checkFeatureConcurrency();
+ if (rc != NO_ERROR) {
+ LOGE("Cannot support Advanced capture modes");
+ return rc;
+ }
+ /*Enable Quadra CFA mode*/
+ LOGH("Enabling Quadra CFA mode");
+ mParameters.setQuadraCfaMode(true, true);
+
+ setOutputImageCount(0);
+ mInputCount = 0;
+ mAdvancedCaptureConfigured = true;
+ /* Display should be disabled for advanced modes */
+ bool bSkipDisplay = true;
+
+ if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
+ // no Advanced capture settings for Aux camera
+ LOGH("X Secondary Camera, no need to process!! ");
+ return rc;
+ }
+
+ /* Do not stop display if in stillmore livesnapshot */
+ if (mParameters.isStillMoreEnabled() &&
+ mParameters.isSeeMoreEnabled()) {
+ bSkipDisplay = false;
+ }
+ if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+ rc = configureAFBracketing();
+ } else if (mParameters.isOptiZoomEnabled()) {
+ rc = configureOptiZoom();
+ } else if(mParameters.isHDREnabled()) {
+ rc = configureHDRBracketing();
+ if (mHDRBracketingEnabled) {
+ rc = mParameters.setToneMapMode(false, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to disable tone map during HDR");
+ }
+ }
+ } else if (mParameters.isAEBracketEnabled()) {
+ rc = mParameters.setToneMapMode(false, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to disable tone map during AEBracketing");
+ }
+ rc = configureAEBracketing();
+ } else if (mParameters.isStillMoreEnabled()) {
+ rc = configureStillMore();
+ } else if ((mParameters.isChromaFlashEnabled())
+ || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+ || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+ rc = mParameters.configFrameCapture(TRUE);
+ } else if (mFlashNeeded && !mLongshotEnabled) {
+ rc = mParameters.configFrameCapture(TRUE);
+ bSkipDisplay = false;
+ } else {
+ LOGH("Advanced Capture feature not enabled!! ");
+ mAdvancedCaptureConfigured = false;
+ bSkipDisplay = false;
+ }
+
+ LOGH("Stop preview temporarily for advanced captures");
+ setDisplaySkip(bSkipDisplay);
+
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
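+/* How the advanced-capture hooks pair up, based on the call sites shown in
+ * this hunk (for orientation only):
+ *
+ *   configureAdvancedCapture()   <-- takePicture(), takeLiveSnapshot_internal()
+ *   startAdvancedCapture(ch)     <-- takePicture()
+ *   stopAdvancedCapture(ch)      <-- cancelPicture()
+ *   unconfigureAdvancedCapture() <-- cancelPicture(), cancelLiveSnapshot_internal()
+ */
+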
+/*===========================================================================
+ * FUNCTION : configureAFBracketing
+ *
+ * DESCRIPTION: configure AF Bracketing.
+ *
+ * PARAMETERS :
+ * @enable : true to enable AF bracketing, false to disable it
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAFBracketing(bool enable)
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+ cam_af_bracketing_t *af_bracketing_need;
+
+ if (mParameters.isUbiRefocus()) {
+ af_bracketing_need =
+ &gCamCapability[mCameraId]->refocus_af_bracketing_need;
+ } else {
+ af_bracketing_need =
+ &gCamCapability[mCameraId]->ubifocus_af_bracketing_need;
+ }
+
+ //Enable AF Bracketing.
+ cam_af_bracketing_t afBracket;
+ memset(&afBracket, 0, sizeof(cam_af_bracketing_t));
+ afBracket.enable = enable;
+ afBracket.burst_count = af_bracketing_need->burst_count;
+
+ for(int8_t i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+ afBracket.focus_steps[i] = af_bracketing_need->focus_steps[i];
+ LOGH("focus_step[%d] = %d", i, afBracket.focus_steps[i]);
+ }
+ //Send cmd to backend to set AF Bracketing for Ubi Focus.
+ rc = mParameters.commitAFBracket(afBracket);
+ if ( NO_ERROR != rc ) {
+ LOGE("cannot configure AF bracketing");
+ return rc;
+ }
+ if (enable) {
+ mParameters.set3ALock(true);
+ mIs3ALocked = true;
+ }
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureHDRBracketing
+ *
+ * DESCRIPTION: configure HDR Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureHDRBracketing()
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+
+ cam_hdr_bracketing_info_t& hdrBracketingSetting =
+ gCamCapability[mCameraId]->hdr_bracketing_setting;
+
+ // 'values' should be in "idx1,idx2,idx3,..." format
+ uint32_t hdrFrameCount =
+ hdrBracketingSetting.num_frames;
+ LOGH("HDR values %d, %d frame count: %u",
+ (int8_t) hdrBracketingSetting.exp_val.values[0],
+ (int8_t) hdrBracketingSetting.exp_val.values[1],
+ hdrFrameCount);
+
+ // Enable AE Bracketing for HDR
+ cam_exp_bracketing_t aeBracket;
+ memset(&aeBracket, 0, sizeof(cam_exp_bracketing_t));
+ aeBracket.mode =
+ hdrBracketingSetting.exp_val.mode;
+
+ if (aeBracket.mode == CAM_EXP_BRACKETING_ON) {
+ mHDRBracketingEnabled = true;
+ }
+
+ String8 tmp;
+ for (uint32_t i = 0; i < hdrFrameCount; i++) {
+ tmp.appendFormat("%d",
+ (int8_t) hdrBracketingSetting.exp_val.values[i]);
+ tmp.append(",");
+ }
+ if (mParameters.isHDR1xFrameEnabled()
+ && mParameters.isHDR1xExtraBufferNeeded()) {
+ tmp.appendFormat("%d", 0);
+ tmp.append(",");
+ }
+
+ if( !tmp.isEmpty() &&
+ ( MAX_EXP_BRACKETING_LENGTH > tmp.length() ) ) {
+ //Trim last comma
+ memset(aeBracket.values, '\0', MAX_EXP_BRACKETING_LENGTH);
+ memcpy(aeBracket.values, tmp.string(), tmp.length() - 1);
+ }
+
+ LOGH("HDR config values %s",
+ aeBracket.values);
+ rc = mParameters.setHDRAEBracket(aeBracket);
+ if ( NO_ERROR != rc ) {
+ LOGE("cannot configure HDR bracketing");
+ return rc;
+ }
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
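+/* Example (hypothetical bracketing table): with exp_val.values = {-6, 0, 6},
+ * num_frames = 3 and the HDR-1x extra buffer needed, tmp is built up as
+ * "-6,0,6,0," and aeBracket.values is committed as "-6,0,6,0" once the
+ * trailing comma is trimmed. */
+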
+/*===========================================================================
+ * FUNCTION : configureAEBracketing
+ *
+ * DESCRIPTION: configure AE Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAEBracketing()
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+
+ rc = mParameters.setAEBracketing();
+ if ( NO_ERROR != rc ) {
+ LOGE("cannot configure AE bracketing");
+ return rc;
+ }
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureOptiZoom
+ *
+ * DESCRIPTION: configure Opti Zoom.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureOptiZoom()
+{
+ int32_t rc = NO_ERROR;
+
+ //store current zoom level.
+ mZoomLevel = mParameters.getParmZoomLevel();
+
+ //set zoom level to 1x;
+ mParameters.setAndCommitZoom(0);
+
+ mParameters.set3ALock(true);
+ mIs3ALocked = true;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureStillMore
+ *
+ * DESCRIPTION: configure StillMore.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureStillMore()
+{
+ int32_t rc = NO_ERROR;
+ uint8_t burst_cnt = 0;
+ cam_still_more_t stillmore_config;
+ cam_still_more_t stillmore_cap;
+
+ /* Disable Tone Map. If seemore is enabled, it will handle disabling it. */
+ if (!mParameters.isSeeMoreEnabled() && !mParameters.isLTMForSeeMoreEnabled()) {
+ rc = mParameters.setToneMapMode(false, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to disable tone map during StillMore");
+ }
+ }
+
+ /* Lock 3A */
+ mParameters.set3ALock(true);
+ mIs3ALocked = true;
+
+ /* Disable Tintless */
+ mParameters.setTintless(false);
+
+ /* Initialize burst count from capability */
+ stillmore_cap = mParameters.getStillMoreCapability();
+ burst_cnt = stillmore_cap.max_burst_count;
+
+ /* Reconfigure burst count from dynamic scene data */
+ cam_dyn_img_data_t dynamic_img_data = mParameters.getDynamicImgData();
+ if (dynamic_img_data.input_count >= stillmore_cap.min_burst_count &&
+ dynamic_img_data.input_count <= stillmore_cap.max_burst_count) {
+ burst_cnt = dynamic_img_data.input_count;
+ }
+
+ /* Reconfigure burst count in the case of liveshot */
+ if (mParameters.isSeeMoreEnabled()) {
+ burst_cnt = 1;
+ }
+
+ /* Reconfigure burst count from user input */
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.imglib.stillmore", prop, "0");
+ uint8_t burst_setprop = (uint32_t)atoi(prop);
+ if (burst_setprop != 0) {
+ if ((burst_setprop < stillmore_cap.min_burst_count) ||
+ (burst_setprop > stillmore_cap.max_burst_count)) {
+ burst_cnt = stillmore_cap.max_burst_count;
+ } else {
+ burst_cnt = burst_setprop;
+ }
+ }
+
+ memset(&stillmore_config, 0, sizeof(cam_still_more_t));
+ stillmore_config.burst_count = burst_cnt;
+ mParameters.setStillMoreSettings(stillmore_config);
+
+ LOGH("Stillmore burst %d", burst_cnt);
+
+ return rc;
+}
+
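+/* Burst-count selection in configureStillMore(), lowest to highest priority:
+ *   1) capability max_burst_count
+ *   2) dynamic scene data, when within [min_burst_count, max_burst_count]
+ *   3) forced to 1 for a SeeMore liveshot
+ *   4) persist.camera.imglib.stillmore, when non-zero (out-of-range values
+ *      fall back to max_burst_count)
+ */
+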
+/*===========================================================================
+ * FUNCTION : stopAdvancedCapture
+ *
+ * DESCRIPTION: stops advanced capture based on capture type
+ *
+ * PARAMETERS :
+ * @pChannel : channel.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopAdvancedCapture(
+ QCameraPicChannel *pChannel)
+{
+ LOGH("stop bracketig");
+ int32_t rc = NO_ERROR;
+
+ if(mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+ rc = pChannel->stopAdvancedCapture(MM_CAMERA_AF_BRACKETING);
+ } else if (mParameters.isChromaFlashEnabled()
+ || (mFlashNeeded && !mLongshotEnabled)
+ || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+ || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+ rc = pChannel->stopAdvancedCapture(MM_CAMERA_FRAME_CAPTURE);
+ } else if(mParameters.isHDREnabled()
+ || mParameters.isAEBracketEnabled()) {
+ rc = pChannel->stopAdvancedCapture(MM_CAMERA_AE_BRACKETING);
+ } else if (mParameters.isOptiZoomEnabled()) {
+ rc = pChannel->stopAdvancedCapture(MM_CAMERA_ZOOM_1X);
+ } else if (mParameters.isStillMoreEnabled()) {
+ LOGH("stopAdvancedCapture not needed for StillMore");
+ } else {
+ LOGH("No Advanced Capture feature enabled!");
+ rc = BAD_VALUE;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : startAdvancedCapture
+ *
+ * DESCRIPTION: starts advanced capture based on capture type
+ *
+ * PARAMETERS :
+ * @pChannel : channel.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startAdvancedCapture(
+ QCameraPicChannel *pChannel)
+{
+ LOGH("Start bracketing");
+ int32_t rc = NO_ERROR;
+
+ if(mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+ rc = pChannel->startAdvancedCapture(MM_CAMERA_AF_BRACKETING);
+ } else if (mParameters.isOptiZoomEnabled()) {
+ rc = pChannel->startAdvancedCapture(MM_CAMERA_ZOOM_1X);
+ } else if (mParameters.isStillMoreEnabled()) {
+ LOGH("startAdvancedCapture not needed for StillMore");
+ } else if (mParameters.isHDREnabled()
+ || mParameters.isAEBracketEnabled()) {
+ rc = pChannel->startAdvancedCapture(MM_CAMERA_AE_BRACKETING);
+ } else if (mParameters.isChromaFlashEnabled()
+ || (mFlashNeeded && !mLongshotEnabled)
+ || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+ || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+ cam_capture_frame_config_t config = mParameters.getCaptureFrameConfig();
+ rc = pChannel->startAdvancedCapture(MM_CAMERA_FRAME_CAPTURE, &config);
+ } else {
+ LOGE("No Advanced Capture feature enabled!");
+ rc = BAD_VALUE;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : preTakePicture
+ *
+ * DESCRIPTION: Prepare take picture impl; restarts preview if necessary
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::preTakePicture()
+{
+ int32_t rc = NO_ERROR;
+ LOGH("E");
+ if (mParameters.getRecordingHintValue() == true) {
+
+ // Give HWI control to restart preview only in single camera mode.
+ // In dual-cam mode, this control belongs to muxer.
+ if (getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+ LOGH("restart preview if rec hint is true and preview is running");
+ stopPreview();
+ mParameters.updateRecordingHintValue(FALSE);
+ // start preview again
+ rc = preparePreview();
+ if (rc == NO_ERROR) {
+ rc = startPreview();
+ if (rc != NO_ERROR) {
+ unpreparePreview();
+ }
+ }
+ }
+ else
+ {
+ // For dual cam mode, update the flag mPreviewRestartNeeded to true
+ // Restart control will be handled by muxer.
+ mPreviewRestartNeeded = true;
+ }
+ }
+
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : takePicture
+ *
+ * DESCRIPTION: take picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePicture()
+{
+ int rc = NO_ERROR;
+
+ // Get total number for snapshots (retro + regular)
+ uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+ // Get number of retro-active snapshots
+ uint8_t numRetroSnapshots = mParameters.getNumOfRetroSnapshots();
+ LOGH("E");
+
+ //Set rotation value from user settings as Jpeg rotation
+ //to configure back-end modules.
+ mParameters.setJpegRotation(mParameters.getRotation());
+
+ // Check if retro-active snapshots are not enabled
+ if (!isRetroPicture() || !mParameters.isZSLMode()) {
+ numRetroSnapshots = 0;
+ LOGH("Reset retro snaphot count to zero");
+ }
+
+ //Do special configure for advanced capture modes.
+ rc = configureAdvancedCapture();
+ if (rc != NO_ERROR) {
+ LOGE("Unsupported capture call");
+ return rc;
+ }
+
+ if (mAdvancedCaptureConfigured) {
+ numSnapshots = mParameters.getBurstCountForAdvancedCapture();
+ }
+ LOGI("snap count = %d zsl = %d advanced = %d",
+ numSnapshots, mParameters.isZSLMode(), mAdvancedCaptureConfigured);
+
+ if (mParameters.isZSLMode()) {
+ QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+ QCameraPicChannel *pPicChannel = (QCameraPicChannel *)pChannel;
+ if (NULL != pPicChannel) {
+
+ if (mParameters.getofflineRAW()) {
+ startRAWChannel(pPicChannel);
+ pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_RAW];
+ if (pPicChannel == NULL) {
+ LOGE("RAW Channel is NULL in Manual capture mode");
+ stopRAWChannel();
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ rc = configureOnlineRotation(*pPicChannel);
+ if (rc != NO_ERROR) {
+ LOGE("online rotation failed");
+ return rc;
+ }
+
+ // start postprocessor
+ DeferWorkArgs args;
+ memset(&args, 0, sizeof(DeferWorkArgs));
+
+ args.pprocArgs = pPicChannel;
+
+ // No need to wait for mInitPProcJob here, because it was
+ // queued in startPreview, and will definitely be processed before
+ // mReprocJob can begin.
+ mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+ args);
+ if (mReprocJob == 0) {
+ LOGE("Failure: Unable to start pproc");
+ return -ENOMEM;
+ }
+
+ // Check that all preview buffers are mapped before creating a JPEG
+ // session, since preview stream buffers are queried while it is created
+ uint8_t numStreams = pChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ QCameraStream *pPreviewStream = NULL;
+ for (uint8_t i = 0 ; i < numStreams ; i++ ) {
+ pStream = pChannel->getStreamByIndex(i);
+ if (!pStream)
+ continue;
+ if (CAM_STREAM_TYPE_PREVIEW == pStream->getMyType()) {
+ pPreviewStream = pStream;
+ break;
+ }
+ }
+ if (pPreviewStream != NULL) {
+ Mutex::Autolock l(mMapLock);
+ QCameraMemory *pMemory = pStream->getStreamBufs();
+ if (!pMemory) {
+ LOGE("Error!! pMemory is NULL");
+ return -ENOMEM;
+ }
+
+ uint8_t waitCnt = 2;
+ while (!pMemory->checkIfAllBuffersMapped() && (waitCnt > 0)) {
+ LOGL(" Waiting for preview buffers to be mapped");
+ mMapCond.waitRelative(
+ mMapLock, CAMERA_DEFERRED_MAP_BUF_TIMEOUT);
+ LOGL("Wait completed!!");
+ waitCnt--;
+ }
+ // If all buffers are not mapped after retries, assert
+ assert(pMemory->checkIfAllBuffersMapped());
+ } else {
+ assert(pPreviewStream);
+ }
+
+ // Create JPEG session
+ mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+ args);
+ if (mJpegJob == 0) {
+ LOGE("Failed to queue CREATE_JPEG_SESSION");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work was failed");
+ }
+ m_postprocessor.stop();
+ return -ENOMEM;
+ }
+
+ if (mAdvancedCaptureConfigured) {
+ rc = startAdvancedCapture(pPicChannel);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start zsl advanced capture");
+ return rc;
+ }
+ }
+ if (mLongshotEnabled && mPrepSnapRun) {
+ mCameraHandle->ops->start_zsl_snapshot(
+ mCameraHandle->camera_handle,
+ pPicChannel->getMyHandle());
+ }
+ // If frame sync is ON and it is a SECONDARY camera,
+ // we do not need to send the take picture command to interface
+ // It will be handled along with PRIMARY camera takePicture request
+ mm_camera_req_buf_t buf;
+ memset(&buf, 0x0, sizeof(buf));
+ if ((!mParameters.isAdvCamFeaturesEnabled() &&
+ !mFlashNeeded &&
+ !isLongshotEnabled() &&
+ isFrameSyncEnabled()) &&
+ (getRelatedCamSyncInfo()->sync_control ==
+ CAM_SYNC_RELATED_SENSORS_ON)) {
+ if (getRelatedCamSyncInfo()->mode == CAM_MODE_PRIMARY) {
+ buf.type = MM_CAMERA_REQ_FRAME_SYNC_BUF;
+ buf.num_buf_requested = numSnapshots;
+ rc = pPicChannel->takePicture(&buf);
+ if (rc != NO_ERROR) {
+ LOGE("FS_DBG cannot take ZSL picture, stop pproc");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+ LOGE("Jpeg Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ m_postprocessor.stop();
+ return rc;
+ }
+ LOGI("PRIMARY camera: send frame sync takePicture!!");
+ }
+ } else {
+ buf.type = MM_CAMERA_REQ_SUPER_BUF;
+ buf.num_buf_requested = numSnapshots;
+ buf.num_retro_buf_requested = numRetroSnapshots;
+ rc = pPicChannel->takePicture(&buf);
+ if (rc != NO_ERROR) {
+ LOGE("cannot take ZSL picture, stop pproc");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+ LOGE("Jpeg Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ m_postprocessor.stop();
+ return rc;
+ }
+ }
+ } else {
+ LOGE("ZSL channel is NULL");
+ return UNKNOWN_ERROR;
+ }
+ } else {
+
+ // start snapshot
+ if (mParameters.isJpegPictureFormat() ||
+ mParameters.isNV16PictureFormat() ||
+ mParameters.isNV21PictureFormat()) {
+
+ //STOP Preview for Non ZSL use case
+ stopPreview();
+
+ //Config CAPTURE channels
+ rc = declareSnapshotStreams();
+ if (NO_ERROR != rc) {
+ return rc;
+ }
+
+ rc = addCaptureChannel();
+ if ((rc == NO_ERROR) &&
+ (NULL != m_channels[QCAMERA_CH_TYPE_CAPTURE])) {
+
+ if (!mParameters.getofflineRAW()) {
+ rc = configureOnlineRotation(
+ *m_channels[QCAMERA_CH_TYPE_CAPTURE]);
+ if (rc != NO_ERROR) {
+ LOGE("online rotation failed");
+ delChannel(QCAMERA_CH_TYPE_CAPTURE);
+ return rc;
+ }
+ }
+
+ DeferWorkArgs args;
+ memset(&args, 0, sizeof(DeferWorkArgs));
+
+ args.pprocArgs = m_channels[QCAMERA_CH_TYPE_CAPTURE];
+
+ // No need to wait for mInitPProcJob here, because it was
+ // queued in startPreview, and will definitely be processed before
+ // mReprocJob can begin.
+ mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+ args);
+ if (mReprocJob == 0) {
+ LOGE("Failure: Unable to start pproc");
+ return -ENOMEM;
+ }
+
+ // Create JPEG session
+ mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+ args);
+ if (mJpegJob == 0) {
+ LOGE("Failed to queue CREATE_JPEG_SESSION");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work was failed");
+ }
+ m_postprocessor.stop();
+ return -ENOMEM;
+ }
+
+ // start capture channel
+ rc = m_channels[QCAMERA_CH_TYPE_CAPTURE]->start();
+ if (rc != NO_ERROR) {
+ LOGE("cannot start capture channel");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+ LOGE("Jpeg Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ delChannel(QCAMERA_CH_TYPE_CAPTURE);
+ return rc;
+ }
+
+ QCameraPicChannel *pCapChannel =
+ (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+ if (NULL != pCapChannel) {
+ if (mParameters.isUbiFocusEnabled() ||
+ mParameters.isUbiRefocus() ||
+ mParameters.isChromaFlashEnabled()) {
+ rc = startAdvancedCapture(pCapChannel);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start advanced capture");
+ return rc;
+ }
+ }
+ }
+ if ( mLongshotEnabled ) {
+ rc = longShot();
+ if (NO_ERROR != rc) {
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+ LOGE("Jpeg Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ delChannel(QCAMERA_CH_TYPE_CAPTURE);
+ return rc;
+ }
+ }
+ } else {
+ LOGE("cannot add capture channel");
+ delChannel(QCAMERA_CH_TYPE_CAPTURE);
+ return rc;
+ }
+ } else {
+ // Stop Preview before taking NZSL snapshot
+ stopPreview();
+
+ rc = mParameters.updateRAW(gCamCapability[mCameraId]->raw_dim[0]);
+ if (NO_ERROR != rc) {
+ LOGE("Raw dimension update failed %d", rc);
+ return rc;
+ }
+
+ rc = declareSnapshotStreams();
+ if (NO_ERROR != rc) {
+ LOGE("RAW stream info configuration failed %d", rc);
+ return rc;
+ }
+
+ rc = addChannel(QCAMERA_CH_TYPE_RAW);
+ if (rc == NO_ERROR) {
+ // start postprocessor
+ if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+ LOGE("Reprocess Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+
+ rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start postprocessor");
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ return rc;
+ }
+
+ rc = startChannel(QCAMERA_CH_TYPE_RAW);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start raw channel");
+ m_postprocessor.stop();
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ return rc;
+ }
+ } else {
+ LOGE("cannot add raw channel");
+ return rc;
+ }
+ }
+ }
+
+ //When taking a picture, stop sending preview callbacks to the app
+ m_stateMachine.setPreviewCallbackNeeded(false);
+ LOGI("X rc = %d", rc);
+ return rc;
+}
+
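+/* Rough shape of takePicture() above (summary of the code, not a contract):
+ *
+ *   ZSL     : configureAdvancedCapture() -> queue PPROC_START and
+ *             CREATE_JPEG_SESSION deferred work -> optional
+ *             startAdvancedCapture() -> pPicChannel->takePicture(&buf),
+ *             either MM_CAMERA_REQ_SUPER_BUF or a frame-sync request on the
+ *             primary sensor.
+ *   non-ZSL : stopPreview() -> add capture (or RAW) channel -> start
+ *             postprocessor / JPEG session -> start the channel; longShot()
+ *             queues additional super buffers while longshot is enabled.
+ */
+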
+/*===========================================================================
+ * FUNCTION : configureOnlineRotation
+ *
+ * DESCRIPTION: Configure backend with expected rotation for snapshot stream
+ *
+ * PARAMETERS :
+ * @ch : Channel containing a snapshot stream
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureOnlineRotation(QCameraChannel &ch)
+{
+ int rc = NO_ERROR;
+ uint32_t streamId = 0;
+ QCameraStream *pStream = NULL;
+
+ for (uint8_t i = 0; i < ch.getNumOfStreams(); i++) {
+ QCameraStream *stream = ch.getStreamByIndex(i);
+ if ((NULL != stream) &&
+ ((CAM_STREAM_TYPE_SNAPSHOT == stream->getMyType())
+ || (CAM_STREAM_TYPE_RAW == stream->getMyType()))) {
+ pStream = stream;
+ break;
+ }
+ }
+
+ if (NULL == pStream) {
+ LOGE("No snapshot stream found!");
+ return BAD_VALUE;
+ }
+
+ streamId = pStream->getMyServerID();
+ // Update online rotation configuration
+ rc = mParameters.addOnlineRotation(mParameters.getJpegRotation(), streamId,
+ mParameters.getDeviceRotation());
+ if (rc != NO_ERROR) {
+ LOGE("addOnlineRotation failed %d", rc);
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : declareSnapshotStreams
+ *
+ * DESCRIPTION: Configure backend with expected snapshot streams
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::declareSnapshotStreams()
+{
+ int rc = NO_ERROR;
+
+ // Update stream info configuration
+ rc = mParameters.setStreamConfigure(true, mLongshotEnabled, false);
+ if (rc != NO_ERROR) {
+ LOGE("setStreamConfigure failed %d", rc);
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : longShot
+ *
+ * DESCRIPTION: Queue one more ZSL frame
+ * in the longshot pipe.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::longShot()
+{
+ int32_t rc = NO_ERROR;
+ uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+ QCameraPicChannel *pChannel = NULL;
+
+ if (mParameters.isZSLMode()) {
+ pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+ } else {
+ pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+ }
+
+ if (NULL != pChannel) {
+ mm_camera_req_buf_t buf;
+ memset(&buf, 0x0, sizeof(buf));
+ buf.type = MM_CAMERA_REQ_SUPER_BUF;
+ buf.num_buf_requested = numSnapshots;
+ rc = pChannel->takePicture(&buf);
+ } else {
+ LOGE("Capture channel not initialized!");
+ rc = NO_INIT;
+ goto end;
+ }
+
+end:
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stopCaptureChannel
+ *
+ * DESCRIPTION: Stops capture channel
+ *
+ * PARAMETERS :
+ * @destroy : Set to true to stop and delete camera channel.
+ * Set to false to only stop capture channel.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopCaptureChannel(bool destroy)
+{
+ int rc = NO_ERROR;
+ if (mParameters.isJpegPictureFormat() ||
+ mParameters.isNV16PictureFormat() ||
+ mParameters.isNV21PictureFormat()) {
+ mParameters.setQuadraCfaMode(false, true);
+ rc = stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+ if (destroy && (NO_ERROR == rc)) {
+ // Destroy camera channel but don't release context
+ waitDeferredWork(mJpegJob);
+ rc = delChannel(QCAMERA_CH_TYPE_CAPTURE, false);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelPicture
+ *
+ * DESCRIPTION: cancel picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelPicture()
+{
+ waitDeferredWork(mReprocJob);
+ waitDeferredWork(mJpegJob);
+
+ //stop post processor
+ m_postprocessor.stop();
+
+ unconfigureAdvancedCapture();
+ LOGH("Enable display frames again");
+ setDisplaySkip(FALSE);
+
+ if (!mLongshotEnabled) {
+ m_perfLock.lock_rel();
+ }
+
+ if (mParameters.isZSLMode()) {
+ QCameraPicChannel *pPicChannel = NULL;
+ if (mParameters.getofflineRAW()) {
+ pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_RAW];
+ } else {
+ pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+ }
+ if (NULL != pPicChannel) {
+ pPicChannel->cancelPicture();
+ stopRAWChannel();
+ stopAdvancedCapture(pPicChannel);
+ }
+ } else {
+
+ // normal capture case
+ if (mParameters.isJpegPictureFormat() ||
+ mParameters.isNV16PictureFormat() ||
+ mParameters.isNV21PictureFormat()) {
+ stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+ delChannel(QCAMERA_CH_TYPE_CAPTURE);
+ } else {
+ stopChannel(QCAMERA_CH_TYPE_RAW);
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : captureDone
+ *
+ * DESCRIPTION: Function called when the capture is completed before encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::captureDone()
+{
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE;
+ int32_t rc = processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGE("processEvt ZSL capture done failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for ZSL capture done event");
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : Live_Snapshot_thread
+ *
+ * DESCRIPTION: Separate thread for taking live snapshot during recording
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN : none
+ *==========================================================================*/
+void* Live_Snapshot_thread (void* data)
+{
+
+ QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(data);
+ if (!hw) {
+ LOGE("take_picture_thread: NULL camera device");
+ return (void *)BAD_VALUE;
+ }
+ if (hw->bLiveSnapshot) {
+ hw->takeLiveSnapshot_internal();
+ } else {
+ hw->cancelLiveSnapshot_internal();
+ }
+ return (void* )NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : Int_Pic_thread
+ *
+ * DESCRIPTION: Separate thread for taking snapshot triggered by camera backend
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN : none
+ *==========================================================================*/
+void* Int_Pic_thread (void* data)
+{
+ int rc = NO_ERROR;
+
+ QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(data);
+
+ if (!hw) {
+ LOGE("take_picture_thread: NULL camera device");
+ return (void *)BAD_VALUE;
+ }
+
+ bool JpegMemOpt = false;
+ char raw_format[PROPERTY_VALUE_MAX];
+
+ memset(raw_format, 0, sizeof(raw_format));
+
+ rc = hw->takeBackendPic_internal(&JpegMemOpt, &raw_format[0]);
+ if (rc == NO_ERROR) {
+ hw->checkIntPicPending(JpegMemOpt, &raw_format[0]);
+ } else {
+ //Snapshot attempt not successful, we need to do cleanup here
+ hw->clearIntPendingEvents();
+ }
+
+ return (void* )NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : takeLiveSnapshot
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot()
+{
+ int rc = NO_ERROR;
+ if (mLiveSnapshotThread != 0) {
+ pthread_join(mLiveSnapshotThread,NULL);
+ mLiveSnapshotThread = 0;
+ }
+ bLiveSnapshot = true;
+ rc= pthread_create(&mLiveSnapshotThread, NULL, Live_Snapshot_thread, (void *) this);
+ if (!rc) {
+ pthread_setname_np(mLiveSnapshotThread, "CAM_liveSnap");
+ }
+ return rc;
+}
+
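+/* Note: takeLiveSnapshot() and cancelLiveSnapshot() both reuse
+ * Live_Snapshot_thread() as the thread entry and select the action through
+ * bLiveSnapshot, i.e. (sketch):
+ *
+ *   hw->takeLiveSnapshot();    // bLiveSnapshot = true  -> takeLiveSnapshot_internal()
+ *   ...
+ *   hw->cancelLiveSnapshot();  // bLiveSnapshot = false -> cancelLiveSnapshot_internal()
+ */
+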
+/*===========================================================================
+ * FUNCTION : takePictureInternal
+ *
+ * DESCRIPTION: take snapshot triggered by backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePictureInternal()
+{
+ int rc = NO_ERROR;
+ rc= pthread_create(&mIntPicThread, NULL, Int_Pic_thread, (void *) this);
+ if (!rc) {
+ pthread_setname_np(mIntPicThread, "CAM_IntPic");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : checkIntPicPending
+ *
+ * DESCRIPTION: timed wait for jpeg completion event, and send
+ * back completion event to backend
+ *
+ * PARAMETERS :
+ * @JpegMemOpt : saved JPEG memory optimization setting to restore
+ * @raw_format : saved RAW format property value to restore
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::checkIntPicPending(bool JpegMemOpt, char *raw_format)
+{
+ bool bSendToBackend = true;
+ cam_int_evt_params_t params;
+ int rc = NO_ERROR;
+
+ struct timespec ts;
+ struct timeval tp;
+ gettimeofday(&tp, NULL);
+ ts.tv_sec = tp.tv_sec + 5;
+ ts.tv_nsec = tp.tv_usec * 1000;
+
+ if (true == m_bIntJpegEvtPending ||
+ (true == m_bIntRawEvtPending)) {
+ //Waiting in HAL for snapshot taken notification
+ pthread_mutex_lock(&m_int_lock);
+ rc = pthread_cond_timedwait(&m_int_cond, &m_int_lock, &ts);
+ if (ETIMEDOUT == rc || 0x0 == m_BackendFileName[0]) {
+ //Hit a timeout, or some spurious activity
+ bSendToBackend = false;
+ }
+
+ if (true == m_bIntJpegEvtPending) {
+ params.event_type = 0;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT, params.picture_format);
+ } else if (true == m_bIntRawEvtPending) {
+ params.event_type = 1;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_RAW, params.picture_format);
+ }
+ pthread_mutex_unlock(&m_int_lock);
+
+ if (true == m_bIntJpegEvtPending) {
+ //Attempting to restart preview after taking JPEG snapshot
+ lockAPI();
+ rc = processAPI(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+ unlockAPI();
+ m_postprocessor.setJpegMemOpt(JpegMemOpt);
+ } else if (true == m_bIntRawEvtPending) {
+ //Attempting to restart preview after taking RAW snapshot
+ stopChannel(QCAMERA_CH_TYPE_RAW);
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ //restoring the old raw format
+ property_set("persist.camera.raw.format", raw_format);
+ }
+
+ if (true == bSendToBackend) {
+ //send event back to server with the file path
+ params.dim = m_postprocessor.m_dst_dim;
+ memcpy(&params.path[0], &m_BackendFileName[0], QCAMERA_MAX_FILEPATH_LENGTH);
+ memset(&m_BackendFileName[0], 0x0, QCAMERA_MAX_FILEPATH_LENGTH);
+ params.size = mBackendFileSize;
+ rc = mParameters.setIntEvent(params);
+ }
+
+ clearIntPendingEvents();
+ }
+
+ return;
+}
+
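+/* The wait above uses an absolute deadline five seconds in the future; an
+ * equivalent construction with clock_gettime() would be (sketch only, not a
+ * functional change, assuming m_int_cond uses the default CLOCK_REALTIME
+ * clock):
+ *
+ *   struct timespec ts;
+ *   clock_gettime(CLOCK_REALTIME, &ts);
+ *   ts.tv_sec += 5;
+ *   rc = pthread_cond_timedwait(&m_int_cond, &m_int_lock, &ts);
+ */
+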
+/*===========================================================================
+ * FUNCTION : takeBackendPic_internal
+ *
+ * DESCRIPTION: take snapshot triggered by backend
+ *
+ * PARAMETERS :
+ * @JpegMemOpt : [OUT] JPEG memory optimization setting, saved for restore
+ * @raw_format : [OUT] RAW format property value, saved for restore
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeBackendPic_internal(bool *JpegMemOpt, char *raw_format)
+{
+ int rc = NO_ERROR;
+ qcamera_api_result_t apiResult;
+
+ lockAPI();
+ //Set rotation value from user settings as Jpeg rotation
+ //to configure back-end modules.
+ mParameters.setJpegRotation(mParameters.getRotation());
+
+ setRetroPicture(0);
+ /* Prepare snapshot in case LED needs to be flashed */
+ if (mFlashNeeded == 1 || mParameters.isChromaFlashEnabled()) {
+ // Start Preparing for normal Frames
+ LOGH("Start Prepare Snapshot");
+ /* Prepare snapshot in case LED needs to be flashed */
+ rc = processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+ if (rc == NO_ERROR) {
+ waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
+ rc = apiResult.status;
+ }
+ LOGH("Prep Snapshot done rc = %d", rc);
+ mPrepSnapRun = true;
+ }
+ unlockAPI();
+
+ if (true == m_bIntJpegEvtPending) {
+ //Attempting to take JPEG snapshot
+ if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+ LOGE("Init PProc Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ *JpegMemOpt = m_postprocessor.getJpegMemOpt();
+ m_postprocessor.setJpegMemOpt(false);
+
+ /* capture */
+ lockAPI();
+ LOGH("Capturing internal snapshot");
+ rc = processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+ if (rc == NO_ERROR) {
+ waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+ rc = apiResult.status;
+ }
+ unlockAPI();
+ } else if (true == m_bIntRawEvtPending) {
+ //Attempting to take RAW snapshot
+ (void)JpegMemOpt;
+ stopPreview();
+
+ //getting the existing raw format type
+ property_get("persist.camera.raw.format", raw_format, "17");
+ //setting it to a default known value for this task
+ property_set("persist.camera.raw.format", "18");
+
+ rc = addChannel(QCAMERA_CH_TYPE_RAW);
+ if (rc == NO_ERROR) {
+ // start postprocessor
+ if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+ LOGE("Init PProc Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start postprocessor");
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ return rc;
+ }
+
+ rc = startChannel(QCAMERA_CH_TYPE_RAW);
+ if (rc != NO_ERROR) {
+ LOGE("cannot start raw channel");
+ m_postprocessor.stop();
+ delChannel(QCAMERA_CH_TYPE_RAW);
+ return rc;
+ }
+ } else {
+ LOGE("cannot add raw channel");
+ return rc;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : clearIntPendingEvents
+ *
+ * DESCRIPTION: clear internal pending events pertaining to backend
+ * snapshot requests
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::clearIntPendingEvents()
+{
+ int rc = NO_ERROR;
+
+ if (true == m_bIntRawEvtPending) {
+ preparePreview();
+ startPreview();
+ }
+ if (true == m_bIntJpegEvtPending) {
+ if (false == mParameters.isZSLMode()) {
+ lockAPI();
+ rc = processAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
+ unlockAPI();
+ }
+ }
+
+ pthread_mutex_lock(&m_int_lock);
+ if (true == m_bIntJpegEvtPending) {
+ m_bIntJpegEvtPending = false;
+ } else if (true == m_bIntRawEvtPending) {
+ m_bIntRawEvtPending = false;
+ }
+ pthread_mutex_unlock(&m_int_lock);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : takeLiveSnapshot_internal
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot_internal()
+{
+ int rc = NO_ERROR;
+
+ QCameraChannel *pChannel = NULL;
+ QCameraChannel *pPreviewChannel = NULL;
+ QCameraStream *pPreviewStream = NULL;
+ QCameraStream *pStream = NULL;
+
+ //Set rotation value from user settings as Jpeg rotation
+ //to configure back-end modules.
+ mParameters.setJpegRotation(mParameters.getRotation());
+
+ // Configure advanced capture
+ rc = configureAdvancedCapture();
+ if (rc != NO_ERROR) {
+ LOGE("Unsupported capture call");
+ goto end;
+ }
+
+ if (isLowPowerMode()) {
+ pChannel = m_channels[QCAMERA_CH_TYPE_VIDEO];
+ } else {
+ pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+ }
+
+ if (NULL == pChannel) {
+ LOGE("Snapshot/Video channel not initialized");
+ rc = NO_INIT;
+ goto end;
+ }
+
+ // Check that all preview buffers are mapped before creating a JPEG
+ // session, since preview stream buffers are queried while it is created
+ pPreviewChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ if (pPreviewChannel != NULL) {
+ uint32_t numStreams = pPreviewChannel->getNumOfStreams();
+
+ for (uint8_t i = 0 ; i < numStreams ; i++ ) {
+ pStream = pPreviewChannel->getStreamByIndex(i);
+ if (!pStream)
+ continue;
+ if (CAM_STREAM_TYPE_PREVIEW == pStream->getMyType()) {
+ pPreviewStream = pStream;
+ break;
+ }
+ }
+
+ if (pPreviewStream != NULL) {
+ Mutex::Autolock l(mMapLock);
+ QCameraMemory *pMemory = pStream->getStreamBufs();
+ if (!pMemory) {
+ LOGE("Error!! pMemory is NULL");
+ return -ENOMEM;
+ }
+
+ uint8_t waitCnt = 2;
+ while (!pMemory->checkIfAllBuffersMapped() && (waitCnt > 0)) {
+ LOGL(" Waiting for preview buffers to be mapped");
+ mMapCond.waitRelative(
+ mMapLock, CAMERA_DEFERRED_MAP_BUF_TIMEOUT);
+ LOGL("Wait completed!!");
+ waitCnt--;
+ }
+ // If all buffers are not mapped after retries, assert
+ assert(pMemory->checkIfAllBuffersMapped());
+ } else {
+ assert(pPreviewStream);
+ }
+ }
+
+ DeferWorkArgs args;
+ memset(&args, 0, sizeof(DeferWorkArgs));
+
+ args.pprocArgs = pChannel;
+
+ // No need to wait for mInitPProcJob here, because it was
+ // queued in startPreview, and will definitely be processed before
+ // mReprocJob can begin.
+ mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+ args);
+ if (mReprocJob == 0) {
+ LOGE("Failed to queue CMD_DEF_PPROC_START");
+ rc = -ENOMEM;
+ goto end;
+ }
+
+ // Create JPEG session
+ mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+ args);
+ if (mJpegJob == 0) {
+ LOGE("Failed to queue CREATE_JPEG_SESSION");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work was failed");
+ }
+ m_postprocessor.stop();
+ rc = -ENOMEM;
+ goto end;
+ }
+
+ if (isLowPowerMode()) {
+ mm_camera_req_buf_t buf;
+ memset(&buf, 0x0, sizeof(buf));
+ buf.type = MM_CAMERA_REQ_SUPER_BUF;
+ buf.num_buf_requested = 1;
+ rc = ((QCameraVideoChannel*)pChannel)->takePicture(&buf);
+ goto end;
+ }
+
+ //Disable reprocess for 4K liveshot case
+ if (!mParameters.is4k2kVideoResolution()) {
+ rc = configureOnlineRotation(*m_channels[QCAMERA_CH_TYPE_SNAPSHOT]);
+ if (rc != NO_ERROR) {
+ LOGE("online rotation failed");
+ if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+ LOGE("Reprocess Deferred work was failed");
+ }
+ if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+ LOGE("Jpeg Deferred work was failed");
+ }
+ m_postprocessor.stop();
+ return rc;
+ }
+ }
+
+ if ((NULL != pChannel) && (mParameters.isTNRSnapshotEnabled())) {
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < pChannel->getNumOfStreams(); i++ ) {
+ pStream = pChannel->getStreamByIndex(i);
+ if ((NULL != pStream) &&
+ (CAM_STREAM_TYPE_SNAPSHOT == pStream->getMyType())) {
+ break;
+ }
+ }
+ if (pStream != NULL) {
+ LOGD("REQUEST_FRAMES event for TNR snapshot");
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES;
+ param.frameRequest.enableStream = 1;
+ rc = pStream->setParameter(param);
+ if (rc != NO_ERROR) {
+ LOGE("Stream Event REQUEST_FRAMES failed");
+ }
+ goto end;
+ }
+ }
+
+ // start snapshot channel
+ if ((rc == NO_ERROR) && (NULL != pChannel)) {
+ // Do not link metadata stream for 4K2k resolution
+ // as CPP processing would be done on snapshot stream and not
+ // reprocess stream
+ if (!mParameters.is4k2kVideoResolution()) {
+ // Find and try to link a metadata stream from preview channel
+ QCameraChannel *pMetaChannel = NULL;
+ QCameraStream *pMetaStream = NULL;
+ QCameraStream *pPreviewStream = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+ pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ uint32_t streamNum = pMetaChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+ pStream = pMetaChannel->getStreamByIndex(i);
+ if (NULL != pStream) {
+ if (CAM_STREAM_TYPE_METADATA == pStream->getMyType()) {
+ pMetaStream = pStream;
+ } else if (CAM_STREAM_TYPE_PREVIEW == pStream->getMyType()) {
+ pPreviewStream = pStream;
+ }
+ }
+ }
+ }
+
+ if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+ rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+ if (NO_ERROR != rc) {
+ LOGE("Metadata stream link failed %d", rc);
+ }
+ }
+ if ((NULL != pMetaChannel) && (NULL != pPreviewStream)) {
+ rc = pChannel->linkStream(pMetaChannel, pPreviewStream);
+ if (NO_ERROR != rc) {
+ LOGE("Preview stream link failed %d", rc);
+ }
+ }
+ }
+ rc = pChannel->start();
+ }
+
+end:
+ if (rc != NO_ERROR) {
+ rc = processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+ rc = sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelLiveSnapshot
+ *
+ * DESCRIPTION: cancel current live snapshot request
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot()
+{
+ int rc = NO_ERROR;
+ if (mLiveSnapshotThread != 0) {
+ pthread_join(mLiveSnapshotThread,NULL);
+ mLiveSnapshotThread = 0;
+ }
+ bLiveSnapshot = false;
+ rc= pthread_create(&mLiveSnapshotThread, NULL, Live_Snapshot_thread, (void *) this);
+ if (!rc) {
+ pthread_setname_np(mLiveSnapshotThread, "CAM_cancel_liveSnap");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelLiveSnapshot_internal
+ *
+ * DESCRIPTION: cancel live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot_internal() {
+ int rc = NO_ERROR;
+
+ unconfigureAdvancedCapture();
+ LOGH("Enable display frames again");
+ setDisplaySkip(FALSE);
+
+ if (!mLongshotEnabled) {
+ m_perfLock.lock_rel();
+ }
+
+ //stop post processor
+ m_postprocessor.stop();
+
+ // stop snapshot channel
+ if (!mParameters.isTNRSnapshotEnabled()) {
+ rc = stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ } else {
+ QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+ if (NULL != pChannel) {
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < pChannel->getNumOfStreams(); i++ ) {
+ pStream = pChannel->getStreamByIndex(i);
+ if ((NULL != pStream) &&
+ (CAM_STREAM_TYPE_SNAPSHOT ==
+ pStream->getMyType())) {
+ break;
+ }
+ }
+ if (pStream != NULL) {
+ LOGD("REQUEST_FRAMES event for TNR snapshot");
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES;
+ param.frameRequest.enableStream = 0;
+ rc = pStream->setParameter(param);
+ if (rc != NO_ERROR) {
+ LOGE("Stream Event REQUEST_FRAMES failed");
+ }
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : putParameters
+ *
+ * DESCRIPTION: put parameters string impl
+ *
+ * PARAMETERS :
+ * @parms : parameters string to be released
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::putParameters(char *parms)
+{
+ free(parms);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : sendCommand
+ *
+ * DESCRIPTION: send command impl
+ *
+ * PARAMETERS :
+ * @command : command to be executed
+ * @arg1 : optional argument 1
+ * @arg2 : optional argument 2
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::sendCommand(int32_t command,
+ __unused int32_t &arg1, __unused int32_t &arg2)
+{
+ int rc = NO_ERROR;
+
+ switch (command) {
+#ifndef VANILLA_HAL
+ case CAMERA_CMD_LONGSHOT_ON:
+ m_perfLock.lock_acq();
+ arg1 = arg2 = 0;
+ // Longshot can only be enabled when image capture
+ // is not active.
+ if ( !m_stateMachine.isCaptureRunning() ) {
+ LOGI("Longshot Enabled");
+ mLongshotEnabled = true;
+ rc = mParameters.setLongshotEnable(mLongshotEnabled);
+
+ // Due to recent buffer count optimizations
+ // ZSL might run with considerably fewer buffers
+ // when not in longshot mode. Preview needs to
+ // restart in this case.
+ if (isZSLMode() && m_stateMachine.isPreviewRunning()) {
+ QCameraChannel *pChannel = NULL;
+ QCameraStream *pSnapStream = NULL;
+ pChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+ if (NULL != pChannel) {
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+ pStream = pChannel->getStreamByIndex(i);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ pSnapStream = pStream;
+ break;
+ }
+ }
+ }
+ if (NULL != pSnapStream) {
+ uint8_t required = 0;
+ required = getBufNumRequired(CAM_STREAM_TYPE_SNAPSHOT);
+ if (pSnapStream->getBufferCount() < required) {
+ // We restart here to reset the FPS and the number
+ // of buffers as required by the longshot use case.
+ arg1 = QCAMERA_SM_EVT_RESTART_PERVIEW;
+ if (getRelatedCamSyncInfo()->sync_control ==
+ CAM_SYNC_RELATED_SENSORS_ON) {
+ arg2 = QCAMERA_SM_EVT_DELAYED_RESTART;
+ }
+ }
+ }
+ }
+ }
+ //
+ mPrepSnapRun = false;
+ mCACDoneReceived = FALSE;
+ } else {
+ rc = NO_INIT;
+ }
+ break;
+ case CAMERA_CMD_LONGSHOT_OFF:
+ m_perfLock.lock_rel();
+ if ( mLongshotEnabled && m_stateMachine.isCaptureRunning() ) {
+ cancelPicture();
+ processEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+ QCameraChannel *pZSLChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+ if (isZSLMode() && (NULL != pZSLChannel) && mPrepSnapRun) {
+ mCameraHandle->ops->stop_zsl_snapshot(
+ mCameraHandle->camera_handle,
+ pZSLChannel->getMyHandle());
+ }
+ }
+ mPrepSnapRun = false;
+ LOGI("Longshot Disabled");
+ mLongshotEnabled = false;
+ rc = mParameters.setLongshotEnable(mLongshotEnabled);
+ mCACDoneReceived = FALSE;
+ break;
+ case CAMERA_CMD_HISTOGRAM_ON:
+ case CAMERA_CMD_HISTOGRAM_OFF:
+ rc = setHistogram(command == CAMERA_CMD_HISTOGRAM_ON? true : false);
+ LOGH("Histogram -> %s",
+ mParameters.isHistogramEnabled() ? "Enabled" : "Disabled");
+ break;
+#endif
+ case CAMERA_CMD_START_FACE_DETECTION:
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+ mParameters.setFaceDetectionOption(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+ rc = setFaceDetection(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+ LOGH("FaceDetection -> %s",
+ mParameters.isFaceDetectionEnabled() ? "Enabled" : "Disabled");
+ break;
+#ifndef VANILLA_HAL
+ case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+#endif
+ default:
+ rc = NO_ERROR;
+ break;
+ }
+ return rc;
+}
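+
+/* A hedged illustration of the assumed caller contract, not code from this
+ * HAL: for CAMERA_CMD_LONGSHOT_ON the function above piggy-backs a
+ * preview-restart request on the by-reference arguments arg1/arg2, which the
+ * caller presumably forwards back as state-machine events. "hwi" below is a
+ * hypothetical pointer to this interface.
+ */
+#if 0 // illustrative sketch
+    int32_t arg1 = 0, arg2 = 0;
+    if (hwi->sendCommand(CAMERA_CMD_LONGSHOT_ON, arg1, arg2) == NO_ERROR &&
+            arg1 == QCAMERA_SM_EVT_RESTART_PERVIEW) {
+        // forward the piggy-backed restart (and optional delayed-restart) event
+        hwi->processAPI((qcamera_sm_evt_enum_t)arg1, NULL);
+    }
+#endif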
+
+/*===========================================================================
+ * FUNCTION : registerFaceImage
+ *
+ * DESCRIPTION: register face image impl
+ *
+ * PARAMETERS :
+ * @img_ptr : ptr to image buffer
+ * @config : ptr to config struct about input image info
+ * @faceID : [OUT] face ID to uniquely identify the registered face image
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::registerFaceImage(void *img_ptr,
+ cam_pp_offline_src_config_t *config,
+ int32_t &faceID)
+{
+ int rc = NO_ERROR;
+ faceID = -1;
+
+ if (img_ptr == NULL || config == NULL) {
+ LOGE("img_ptr or config is NULL");
+ return BAD_VALUE;
+ }
+
+ // allocate ion memory for source image
+ QCameraHeapMemory *imgBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+ if (imgBuf == NULL) {
+ LOGE("Unable to new heap memory obj for image buf");
+ return NO_MEMORY;
+ }
+
+ rc = imgBuf->allocate(1, config->input_buf_planes.plane_info.frame_len, NON_SECURE);
+ if (rc < 0) {
+ LOGE("Unable to allocate heap memory for image buf");
+ delete imgBuf;
+ return NO_MEMORY;
+ }
+
+ void *pBufPtr = imgBuf->getPtr(0);
+ if (pBufPtr == NULL) {
+ LOGE("image buf is NULL");
+ imgBuf->deallocate();
+ delete imgBuf;
+ return NO_MEMORY;
+ }
+ memcpy(pBufPtr, img_ptr, config->input_buf_planes.plane_info.frame_len);
+
+ cam_pp_feature_config_t pp_feature;
+ memset(&pp_feature, 0, sizeof(cam_pp_feature_config_t));
+ pp_feature.feature_mask = CAM_QCOM_FEATURE_REGISTER_FACE;
+ QCameraReprocessChannel *pChannel =
+ addOfflineReprocChannel(*config, pp_feature, NULL, NULL);
+
+ if (pChannel == NULL) {
+ LOGE("fail to add offline reprocess channel");
+ imgBuf->deallocate();
+ delete imgBuf;
+ return UNKNOWN_ERROR;
+ }
+
+ rc = pChannel->start();
+ if (rc != NO_ERROR) {
+ LOGE("Cannot start reprocess channel");
+ imgBuf->deallocate();
+ delete imgBuf;
+ delete pChannel;
+ return rc;
+ }
+
+ ssize_t bufSize = imgBuf->getSize(0);
+ if (BAD_INDEX != bufSize) {
+ rc = pChannel->doReprocess(imgBuf->getFd(0), (size_t)bufSize, faceID);
+ } else {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ return UNKNOWN_ERROR;
+ }
+
+ // done with register face image, free imgbuf and delete reprocess channel
+ imgBuf->deallocate();
+ delete imgBuf;
+ imgBuf = NULL;
+ pChannel->stop();
+ delete pChannel;
+ pChannel = NULL;
+
+ return rc;
+}
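+
+/* Illustrative sketch (not original HAL code): face registration above is a
+ * one-shot offline reprocess job -- stage the image in ION heap memory, build
+ * a pp feature config with CAM_QCOM_FEATURE_REGISTER_FACE, run a single
+ * reprocess pass to obtain the face ID, then tear everything down. The
+ * channel part of that sequence, condensed from the function above:
+ */
+#if 0 // illustrative sketch
+    cam_pp_feature_config_t pp_feature;
+    memset(&pp_feature, 0, sizeof(cam_pp_feature_config_t));
+    pp_feature.feature_mask = CAM_QCOM_FEATURE_REGISTER_FACE;
+    QCameraReprocessChannel *pChannel =
+            addOfflineReprocChannel(*config, pp_feature, NULL, NULL);
+    if ((pChannel != NULL) && (pChannel->start() == NO_ERROR)) {
+        // one reprocess pass on the staged source buffer yields the face ID
+        pChannel->doReprocess(imgBuf->getFd(0), (size_t)imgBuf->getSize(0), faceID);
+        pChannel->stop();
+    }
+    delete pChannel;
+#endif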
+
+/*===========================================================================
+ * FUNCTION : release
+ *
+ * DESCRIPTION: release camera resource impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::release()
+{
+ // stop and delete all channels
+ for (int i = 0; i <QCAMERA_CH_TYPE_MAX ; i++) {
+ if (m_channels[i] != NULL) {
+ stopChannel((qcamera_ch_type_enum_t)i);
+ delChannel((qcamera_ch_type_enum_t)i);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : dump
+ *
+ * DESCRIPTION: camera status dump impl
+ *
+ * PARAMETERS :
+ * @fd : fd for the buffer to be dumped with camera status
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(int fd)
+{
+ dprintf(fd, "\n Camera HAL information Begin \n");
+ dprintf(fd, "Camera ID: %d \n", mCameraId);
+ dprintf(fd, "StoreMetaDataInFrame: %d \n", mStoreMetaDataInFrame);
+ dprintf(fd, "\n Configuration: %s", mParameters.dump().string());
+ dprintf(fd, "\n State Information: %s", m_stateMachine.dump().string());
+ dprintf(fd, "\n Camera HAL information End \n");
+
+ /* send UPDATE_DEBUG_LEVEL to the backend so that they can read the
+ debug level property */
+ mParameters.updateDebugLevel();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processAPI
+ *
+ * DESCRIPTION: process API calls from upper layer
+ *
+ * PARAMETERS :
+ * @api : API to be processed
+ * @api_payload : ptr to API payload if any
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
+{
+ int ret = DEAD_OBJECT;
+
+ if (m_smThreadActive) {
+ ret = m_stateMachine.procAPI(api, api_payload);
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : processEvt
+ *
+ * DESCRIPTION: process Evt from backend via mm-camera-interface
+ *
+ * PARAMETERS :
+ * @evt : event type to be processed
+ * @evt_payload : ptr to event payload if any
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+ return m_stateMachine.procEvt(evt, evt_payload);
+}
+
+/*===========================================================================
+ * FUNCTION : processSyncEvt
+ *
+ * DESCRIPTION: process synchronous Evt from backend
+ *
+ * PARAMETERS :
+ * @evt : event type to be processed
+ * @evt_payload : ptr to event payload if any
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+ int rc = NO_ERROR;
+
+ pthread_mutex_lock(&m_evtLock);
+ rc = processEvt(evt, evt_payload);
+ if (rc == NO_ERROR) {
+ memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+ while (m_evtResult.request_api != evt) {
+ pthread_cond_wait(&m_evtCond, &m_evtLock);
+ }
+ rc = m_evtResult.status;
+ }
+ pthread_mutex_unlock(&m_evtLock);
+
+ return rc;
+}
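+
+/* Illustrative sketch: processSyncEvt() and signalEvtResult() above form a
+ * standard pthread mutex + condition-variable handshake -- post work, then
+ * wait on a predicate until the worker publishes the matching result. A
+ * self-contained version of the same pattern, with hypothetical names and
+ * assuming <pthread.h> (already used by this file):
+ */
+#if 0 // illustrative sketch
+static pthread_mutex_t ex_lock = PTHREAD_MUTEX_INITIALIZER;
+static pthread_cond_t  ex_cond = PTHREAD_COND_INITIALIZER;
+static int ex_result_ready = 0;
+static int ex_result = 0;
+
+static int waitForResult()
+{
+    pthread_mutex_lock(&ex_lock);
+    while (!ex_result_ready) {            // loop guards against spurious wakeups
+        pthread_cond_wait(&ex_cond, &ex_lock);
+    }
+    int r = ex_result;
+    pthread_mutex_unlock(&ex_lock);
+    return r;
+}
+
+static void publishResult(int r)
+{
+    pthread_mutex_lock(&ex_lock);
+    ex_result = r;
+    ex_result_ready = 1;
+    pthread_cond_signal(&ex_cond);        // wake the waiter once the result is set
+    pthread_mutex_unlock(&ex_lock);
+}
+#endif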
+
+/*===========================================================================
+ * FUNCTION : camEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-camera-interface to handle backend events
+ *
+ * PARAMETERS :
+ * @camera_handle : camera handle
+ * @evt : ptr to event
+ * @user_data : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
+ mm_camera_event_t *evt,
+ void *user_data)
+{
+ QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)user_data;
+ if (obj && evt) {
+ mm_camera_event_t *payload =
+ (mm_camera_event_t *)malloc(sizeof(mm_camera_event_t));
+ if (NULL != payload) {
+ *payload = *evt;
+ //peek into the event, if this is an eztune event from server,
+ //then we don't need to post it to the SM Qs, we should directly
+ //spawn a thread and get the job done (jpeg or raw snapshot)
+ switch (payload->server_event_type) {
+ case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+ //Received JPEG trigger from eztune
+ if (false == obj->m_bIntJpegEvtPending) {
+ pthread_mutex_lock(&obj->m_int_lock);
+ obj->m_bIntJpegEvtPending = true;
+ pthread_mutex_unlock(&obj->m_int_lock);
+ obj->takePictureInternal();
+ }
+ free(payload);
+ break;
+ case CAM_EVENT_TYPE_INT_TAKE_RAW:
+ //Received RAW trigger from eztune
+ if (false == obj->m_bIntRawEvtPending) {
+ pthread_mutex_lock(&obj->m_int_lock);
+ obj->m_bIntRawEvtPending = true;
+ pthread_mutex_unlock(&obj->m_int_lock);
+ obj->takePictureInternal();
+ }
+ free(payload);
+ break;
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ Mutex::Autolock l(obj->mDefLock);
+ obj->mDefCond.broadcast();
+ LOGH("broadcast mDefCond signal\n");
+ }
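+ // no break here: fall through so the event is also posted to the state machine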
+ default:
+ obj->processEvt(QCAMERA_SM_EVT_EVT_NOTIFY, payload);
+ break;
+ }
+ }
+ } else {
+ LOGE("NULL user_data");
+ }
+}
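+
+/* A hedged note on ownership: camEvtHandle() above copies each backend event
+ * into a heap allocation so the payload outlives the callback frame. The
+ * eztune JPEG/RAW branches consume and free the copy immediately, while
+ * events posted via processEvt() are presumably freed later by the
+ * state-machine consumer. The pattern in brief ("handled_inline" is a
+ * hypothetical flag standing in for the eztune branches):
+ */
+#if 0 // illustrative sketch
+    mm_camera_event_t *payload =
+            (mm_camera_event_t *)malloc(sizeof(mm_camera_event_t));
+    if (payload != NULL) {
+        *payload = *evt;                        // plain struct copy
+        if (handled_inline) {
+            free(payload);                      // consumed here, freed here
+        } else {
+            obj->processEvt(QCAMERA_SM_EVT_EVT_NOTIFY, payload); // consumer frees
+        }
+    }
+#endif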
+
+/*===========================================================================
+ * FUNCTION : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events
+ *
+ * PARAMETERS :
+ * @status : status of jpeg job
+ * @client_hdl: jpeg client handle
+ * @jobId : jpeg job Id
+ * @p_output : ptr to jpeg output result struct
+ * @userdata : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::jpegEvtHandle(jpeg_job_status_t status,
+ uint32_t /*client_hdl*/,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_output,
+ void *userdata)
+{
+ QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)userdata;
+ if (obj) {
+ qcamera_jpeg_evt_payload_t *payload =
+ (qcamera_jpeg_evt_payload_t *)malloc(sizeof(qcamera_jpeg_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_jpeg_evt_payload_t));
+ payload->status = status;
+ payload->jobId = jobId;
+ if (p_output != NULL) {
+ payload->out_data = *p_output;
+ }
+ obj->processEvt(QCAMERA_SM_EVT_JPEG_EVT_NOTIFY, payload);
+ }
+ } else {
+ LOGE("NULL user_data");
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : thermalEvtHandle
+ *
+ * DESCRIPTION: routine to handle thermal event notification
+ *
+ * PARAMETERS :
+ * @level : thermal level
+ * @userdata : userdata passed in during registration
+ * @data : opaque data from thermal client
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::thermalEvtHandle(
+ qcamera_thermal_level_enum_t *level, void *userdata, void *data)
+{
+ if (!mCameraOpened) {
+ LOGH("Camera is not opened, no need to handle thermal evt");
+ return NO_ERROR;
+ }
+
+ // Make sure thermal events are logged
+ LOGH("level = %d, userdata = %p, data = %p",
+ *level, userdata, data);
+ //We don't need to lockAPI, waitAPI here. QCAMERA_SM_EVT_THERMAL_NOTIFY
+ // becomes an async call. This also means we can only pass payload
+ // by value, not by address.
+ return processAPI(QCAMERA_SM_EVT_THERMAL_NOTIFY, (void *)level);
+}
+
+/*===========================================================================
+ * FUNCTION : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to notify thread
+ *
+ * PARAMETERS :
+ * @msg_type: msg type to be sent
+ * @ext1 : optional extension1
+ * @ext2 : optional extension2
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendEvtNotify(int32_t msg_type,
+ int32_t ext1,
+ int32_t ext2)
+{
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+ cbArg.msg_type = msg_type;
+ cbArg.ext1 = ext1;
+ cbArg.ext2 = ext2;
+ return m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION : processAEInfo
+ *
+ * DESCRIPTION: process AE updates
+ *
+ * PARAMETERS :
+ * @ae_params: current AE parameters
+ *
+ * RETURN : int32_t type of status
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAEInfo(cam_3a_params_t &ae_params)
+{
+ mParameters.updateAEInfo(ae_params);
+ if (mParameters.isInstantAECEnabled()) {
+ // Reset Instant AEC info only if instant aec enabled.
+ bool bResetInstantAec = false;
+ if (ae_params.settled) {
+ // If AEC settled, reset instant AEC
+ bResetInstantAec = true;
+ } else if ((mParameters.isInstantCaptureEnabled()) &&
+ (mInstantAecFrameCount >= mParameters.getAecFrameBoundValue())) {
+ // if AEC not settled, and instant capture enabled,
+ // reset instant AEC only when frame count is
+ // more or equal to AEC frame bound value.
+ bResetInstantAec = true;
+ } else if ((mParameters.isInstantAECEnabled()) &&
+ (mInstantAecFrameCount >= mParameters.getAecSkipDisplayFrameBound())) {
+ // if AEC not settled, and only instant AEC enabled,
+ // reset instant AEC only when frame count is
+ // more or equal to AEC skip display frame bound value.
+ bResetInstantAec = true;
+ }
+
+ if (bResetInstantAec) {
+ LOGD("setting instant AEC to false");
+ mParameters.setInstantAEC(false, true);
+ mInstantAecFrameCount = 0;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processFocusPositionInfo
+ *
+ * DESCRIPTION: process AF updates
+ *
+ * PARAMETERS :
+ * @cur_pos_info: current lens position
+ *
+ * RETURN : int32_t type of status
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFocusPositionInfo(cam_focus_pos_info_t &cur_pos_info)
+{
+ mParameters.updateCurrentFocusPosition(cur_pos_info);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processAutoFocusEvent
+ *
+ * DESCRIPTION: process auto focus event
+ *
+ * PARAMETERS :
+ * @focus_data: struct containing auto focus result info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAutoFocusEvent(cam_auto_focus_data_t &focus_data)
+{
+ int32_t ret = NO_ERROR;
+ LOGH("E");
+
+ if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
+ // Ignore focus updates
+ LOGH("X Secondary Camera, no need to process!! ");
+ return ret;
+ }
+ cam_focus_mode_type focusMode = mParameters.getFocusMode();
+ LOGH("[AF_DBG] focusMode=%d, focusState=%d",
+ focusMode, focus_data.focus_state);
+
+ switch (focusMode) {
+ case CAM_FOCUS_MODE_AUTO:
+ case CAM_FOCUS_MODE_MACRO:
+ // ignore AF event if AF was already cancelled meanwhile
+ if (!mActiveAF) {
+ break;
+ }
+ // If the HAL focus mode is different from AF INFINITY focus mode, send event to app
+ if ((focus_data.focus_mode == CAM_FOCUS_MODE_INFINITY) &&
+ (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+ ret = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
+ mActiveAF = false; // reset the mActiveAF in this special case
+ break;
+ }
+
+ //while transitioning from CAF->Auto/Macro, we might receive CAF related
+ //events (PASSIVE_*) due to timing. Ignore such events if any.
+ if ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN) ||
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED)) {
+ break;
+ }
+
+ //This is just an intermediate update to HAL indicating focus is in progress. No need
+ //to send this event to app. Same applies to INACTIVE state as well.
+ if ((focus_data.focus_state == CAM_AF_STATE_ACTIVE_SCAN) ||
+ (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+ break;
+ }
+ // update focus distance
+ mParameters.updateFocusDistances(&focus_data.focus_dist);
+
+ //flush any old snapshot frames in ZSL Q which are not focused.
+ if (mParameters.isZSLMode() && focus_data.flush_info.needFlush ) {
+ QCameraPicChannel *pZSLChannel =
+ (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+ if (NULL != pZSLChannel) {
+ //flush the zsl-buffer
+ uint32_t flush_frame_idx = focus_data.flush_info.focused_frame_idx;
+ LOGD("flush the zsl-buffer before frame = %u.", flush_frame_idx);
+ pZSLChannel->flushSuperbuffer(flush_frame_idx);
+ }
+ }
+
+ //send event to app finally
+ LOGI("Send AF DOne event to app");
+ ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+ (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED), 0);
+ break;
+ case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+ case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+
+ // If the HAL focus mode is different from AF INFINITY focus mode, send event to app
+ if ((focus_data.focus_mode == CAM_FOCUS_MODE_INFINITY) &&
+ (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+ ret = sendEvtNotify(CAMERA_MSG_FOCUS, false, 0);
+ mActiveAF = false; // reset the mActiveAF in this special case
+ break;
+ }
+
+ //If AutoFocus() is triggered while in CAF mode, ignore all CAF events (PASSIVE_*) and
+ //process/wait for only ACTIVE_* events.
+ if (((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN)) && mActiveAF) {
+ break;
+ }
+
+ //These are the AF states for which we need to send notification to app in CAF mode.
+ //This includes both regular CAF (PASSIVE) events as well as ACTIVE events ( in case
+ //AF is triggered while in CAF mode)
+ if ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED) ||
+ (focus_data.focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED)) {
+
+ // update focus distance
+ mParameters.updateFocusDistances(&focus_data.focus_dist);
+
+ if (mParameters.isZSLMode() && focus_data.flush_info.needFlush ) {
+ QCameraPicChannel *pZSLChannel =
+ (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+ if (NULL != pZSLChannel) {
+ //flush the zsl-buffer
+ uint32_t flush_frame_idx = focus_data.flush_info.focused_frame_idx;
+ LOGD("flush the zsl-buffer before frame = %u.", flush_frame_idx);
+ pZSLChannel->flushSuperbuffer(flush_frame_idx);
+ }
+ }
+
+ if (mActiveAF) {
+ LOGI("Send AF Done event to app");
+ }
+ ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+ ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+ (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED)), 0);
+ }
+ ret = sendEvtNotify(CAMERA_MSG_FOCUS_MOVE,
+ (focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN), 0);
+ break;
+ case CAM_FOCUS_MODE_INFINITY:
+ case CAM_FOCUS_MODE_FIXED:
+ case CAM_FOCUS_MODE_EDOF:
+ default:
+ LOGH("no ops for autofocus event in focusmode %d", focusMode);
+ break;
+ }
+
+ //Reset mActiveAF once we receive focus done event
+ if ((focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED) ||
+ (focus_data.focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED)) {
+ mActiveAF = false;
+ }
+
+ LOGH("X");
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : processZoomEvent
+ *
+ * DESCRIPTION: process zoom event
+ *
+ * PARAMETERS :
+ * @crop_info : crop info as a result of zoom operation
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZoomEvent(cam_crop_data_t &crop_info)
+{
+ int32_t ret = NO_ERROR;
+
+ for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+ if (m_channels[i] != NULL) {
+ ret = m_channels[i]->processZoomDone(mPreviewWindow, crop_info);
+ }
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : processZSLCaptureDone
+ *
+ * DESCRIPTION: process ZSL capture done events
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZSLCaptureDone()
+{
+ int rc = NO_ERROR;
+
+ if (++mInputCount >= mParameters.getBurstCountForAdvancedCapture()) {
+ rc = unconfigureAdvancedCapture();
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processRetroAECUnlock
+ *
+ * DESCRIPTION: process retro burst AEC unlock events
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processRetroAECUnlock()
+{
+ int rc = NO_ERROR;
+
+ LOGH("LED assisted AF Release AEC Lock");
+ rc = mParameters.setAecLock("false");
+ if (NO_ERROR != rc) {
+ LOGE("Error setting AEC lock");
+ return rc;
+ }
+
+ rc = mParameters.commitParameters();
+ if (NO_ERROR != rc) {
+ LOGE("Error during camera parameter commit");
+ } else {
+ m_bLedAfAecLock = FALSE;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processHDRData
+ *
+ * DESCRIPTION: process HDR scene events
+ *
+ * PARAMETERS :
+ * @hdr_scene : HDR scene event data
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHDRData(
+ __unused cam_asd_hdr_scene_data_t hdr_scene)
+{
+ int rc = NO_ERROR;
+
+#ifndef VANILLA_HAL
+ if (hdr_scene.is_hdr_scene &&
+ (hdr_scene.hdr_confidence > HDR_CONFIDENCE_THRESHOLD) &&
+ mParameters.isAutoHDREnabled()) {
+ m_HDRSceneEnabled = true;
+ } else {
+ m_HDRSceneEnabled = false;
+ }
+ mParameters.setHDRSceneEnable(m_HDRSceneEnabled);
+
+ if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+
+ size_t data_len = sizeof(int);
+ size_t buffer_len = 1 *sizeof(int) //meta type
+ + 1 *sizeof(int) //data len
+ + 1 *sizeof(int); //data
+ camera_memory_t *hdrBuffer = mGetMemory(-1,
+ buffer_len,
+ 1,
+ mCallbackCookie);
+ if ( NULL == hdrBuffer ) {
+ LOGE("Not enough memory for auto HDR data");
+ return NO_MEMORY;
+ }
+
+ int *pHDRData = (int *)hdrBuffer->data;
+ if (pHDRData == NULL) {
+ LOGE("memory data ptr is NULL");
+ hdrBuffer->release(hdrBuffer);
+ return UNKNOWN_ERROR;
+ }
+
+ pHDRData[0] = CAMERA_META_DATA_HDR;
+ pHDRData[1] = (int)data_len;
+ pHDRData[2] = m_HDRSceneEnabled;
+
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_META_DATA;
+ cbArg.data = hdrBuffer;
+ cbArg.user_data = hdrBuffer;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseCameraMemory;
+ rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending auto HDR notification");
+ hdrBuffer->release(hdrBuffer);
+ }
+ }
+
+ LOGH("hdr_scene_data: processHDRData: %d %f",
+ hdr_scene.is_hdr_scene,
+ hdr_scene.hdr_confidence);
+
+#endif
+ return rc;
+}
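+
+/* Illustrative sketch: the CAMERA_MSG_META_DATA payload built above is a flat
+ * int array of { meta type, payload length, payload }. Packing it in
+ * isolation, reusing only names that appear in the function above:
+ */
+#if 0 // illustrative sketch
+    size_t buffer_len = 3 * sizeof(int);        // type + length + one int of data
+    camera_memory_t *buf = mGetMemory(-1, buffer_len, 1, mCallbackCookie);
+    if ((buf != NULL) && (buf->data != NULL)) {
+        int *p = (int *)buf->data;
+        p[0] = CAMERA_META_DATA_HDR;            // meta type tag
+        p[1] = (int)sizeof(int);                // length of the data that follows
+        p[2] = m_HDRSceneEnabled ? 1 : 0;       // the data itself
+    }
+#endif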
+
+/*===========================================================================
+ * FUNCTION : transAwbMetaToParams
+ *
+ * DESCRIPTION: translate awb params from metadata callback to QCameraParametersIntf
+ *
+ * PARAMETERS :
+ * @awb_params : awb params from metadata callback
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::transAwbMetaToParams(cam_awb_params_t &awb_params)
+{
+ mParameters.updateAWBParams(awb_params);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processPrepSnapshotDone
+ *
+ * DESCRIPTION: process prep snapshot done event
+ *
+ * PARAMETERS :
+ * @prep_snapshot_state : state of prepare snapshot done, i.e. whether
+ * future frames are needed for capture.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processPrepSnapshotDoneEvent(
+ cam_prep_snapshot_state_t prep_snapshot_state)
+{
+ int32_t ret = NO_ERROR;
+ LOGI("[KPI Perf]: Received PREPARE SANSPHOT Done event state = %d",
+ prep_snapshot_state);
+ if (m_channels[QCAMERA_CH_TYPE_ZSL] &&
+ prep_snapshot_state == NEED_FUTURE_FRAME) {
+ LOGH("already handled in mm-camera-intf, no ops here");
+ if (isRetroPicture()) {
+ mParameters.setAecLock("true");
+ mParameters.commitParameters();
+ m_bLedAfAecLock = TRUE;
+ }
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : processASDUpdate
+ *
+ * DESCRIPTION: process ASD update event
+ *
+ * PARAMETERS :
+ * @asd_decision : ASD decision data
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processASDUpdate(
+ __unused cam_asd_decision_t asd_decision)
+{
+ if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+ size_t data_len = sizeof(cam_auto_scene_t);
+ size_t buffer_len = 1 *sizeof(int) //meta type
+ + 1 *sizeof(int) //data len
+ + data_len; //data
+ camera_memory_t *asdBuffer = mGetMemory(-1,
+ buffer_len, 1, mCallbackCookie);
+ if ( NULL == asdBuffer ) {
+ LOGE("Not enough memory for histogram data");
+ return NO_MEMORY;
+ }
+
+ int *pASDData = (int *)asdBuffer->data;
+ if (pASDData == NULL) {
+ LOGE("memory data ptr is NULL");
+ asdBuffer->release(asdBuffer);
+ return UNKNOWN_ERROR;
+ }
+
+#ifndef VANILLA_HAL
+ pASDData[0] = CAMERA_META_DATA_ASD;
+ pASDData[1] = (int)data_len;
+ pASDData[2] = asd_decision.detected_scene;
+
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_META_DATA;
+ cbArg.data = asdBuffer;
+ cbArg.user_data = asdBuffer;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseCameraMemory;
+ int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending notification");
+ asdBuffer->release(asdBuffer);
+ }
+#endif
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processJpegNotify
+ *
+ * DESCRIPTION: process jpeg event
+ *
+ * PARAMETERS :
+ * @jpeg_evt: ptr to jpeg event payload
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_evt)
+{
+ return m_postprocessor.processJpegEvt(jpeg_evt);
+}
+
+/*===========================================================================
+ * FUNCTION : lockAPI
+ *
+ * DESCRIPTION: lock to process API
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::lockAPI()
+{
+ pthread_mutex_lock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : waitAPIResult
+ *
+ * DESCRIPTION: wait for an API result to come back. This is a blocking call;
+ * it returns only when the expected API event type arrives
+ *
+ * PARAMETERS :
+ * @api_evt : API event type
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitAPIResult(qcamera_sm_evt_enum_t api_evt,
+ qcamera_api_result_t *apiResult)
+{
+ LOGD("wait for API result of evt (%d)", api_evt);
+ int resultReceived = 0;
+ while (!resultReceived) {
+ pthread_cond_wait(&m_cond, &m_lock);
+ if (m_apiResultList != NULL) {
+ api_result_list *apiResultList = m_apiResultList;
+ api_result_list *apiResultListPrevious = m_apiResultList;
+ while (apiResultList != NULL) {
+ if (apiResultList->result.request_api == api_evt) {
+ resultReceived = 1;
+ *apiResult = apiResultList->result;
+ apiResultListPrevious->next = apiResultList->next;
+ if (apiResultList == m_apiResultList) {
+ m_apiResultList = apiResultList->next;
+ }
+ free(apiResultList);
+ break;
+ }
+ else {
+ apiResultListPrevious = apiResultList;
+ apiResultList = apiResultList->next;
+ }
+ }
+ }
+ }
+ LOGD("return (%d) from API result wait for evt (%d)",
+ apiResult->status, api_evt);
+}
+
+
+/*===========================================================================
+ * FUNCTION : unlockAPI
+ *
+ * DESCRIPTION: API processing is done, unlock
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unlockAPI()
+{
+ pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : signalAPIResult
+ *
+ * DESCRIPTION: signal the condition variable that a certain API event type has arrived
+ *
+ * PARAMETERS :
+ * @result : API result
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalAPIResult(qcamera_api_result_t *result)
+{
+
+ pthread_mutex_lock(&m_lock);
+ api_result_list *apiResult = (api_result_list *)malloc(sizeof(api_result_list));
+ if (apiResult == NULL) {
+ LOGE("ERROR: malloc for api result failed, Result will not be sent");
+ goto malloc_failed;
+ }
+ apiResult->result = *result;
+ apiResult->next = NULL;
+ if (m_apiResultList == NULL) m_apiResultList = apiResult;
+ else {
+ api_result_list *apiResultList = m_apiResultList;
+ while(apiResultList->next != NULL) apiResultList = apiResultList->next;
+ apiResultList->next = apiResult;
+ }
+malloc_failed:
+ pthread_cond_broadcast(&m_cond);
+ pthread_mutex_unlock(&m_lock);
+}
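+
+/* Illustrative sketch: signalAPIResult() and waitAPIResult() above keep
+ * pending results in a singly linked FIFO list -- append at the tail under
+ * the lock, broadcast, and let each waiter detach and free the node matching
+ * its own request. A self-contained version of just the list handling, with
+ * hypothetical names:
+ */
+#if 0 // illustrative sketch
+struct node { int key; struct node *next; };
+
+static void list_append(struct node **head, struct node *n)
+{
+    n->next = NULL;
+    if (*head == NULL) { *head = n; return; }
+    struct node *it = *head;
+    while (it->next != NULL) it = it->next;      // walk to the tail
+    it->next = n;
+}
+
+static struct node *list_take(struct node **head, int key)
+{
+    struct node *it = *head, *prev = NULL;
+    while ((it != NULL) && (it->key != key)) { prev = it; it = it->next; }
+    if (it == NULL) return NULL;                 // no matching result yet
+    if (prev == NULL) *head = it->next;          // matched node was the head
+    else prev->next = it->next;                  // unlink from the middle
+    return it;                                   // caller frees the node
+}
+#endif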
+
+/*===========================================================================
+ * FUNCTION : signalEvtResult
+ *
+ * DESCRIPTION: signal condition variable that certain event was processed
+ *
+ * PARAMETERS :
+ * @result : Event result
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalEvtResult(qcamera_api_result_t *result)
+{
+ pthread_mutex_lock(&m_evtLock);
+ m_evtResult = *result;
+ pthread_cond_signal(&m_evtCond);
+ pthread_mutex_unlock(&m_evtLock);
+}
+
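+/*===========================================================================
+ * FUNCTION : prepareRawStream
+ *
+ * DESCRIPTION: find the maximum frame dimension across all active streams
+ * (skipping metadata and postview), including the streams of the
+ * given channel, and update the RAW dimension accordingly
+ *
+ * PARAMETERS :
+ * @curChannel : channel whose streams are also considered
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/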
+int32_t QCamera2HardwareInterface::prepareRawStream(QCameraChannel *curChannel)
+{
+ int32_t rc = NO_ERROR;
+ cam_dimension_t str_dim,max_dim;
+ QCameraChannel *pChannel;
+
+ max_dim.width = 0;
+ max_dim.height = 0;
+
+ for (int j = 0; j < QCAMERA_CH_TYPE_MAX; j++) {
+ if (m_channels[j] != NULL) {
+ pChannel = m_channels[j];
+ for (uint8_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+ QCameraStream *pStream = pChannel->getStreamByIndex(i);
+ if (pStream != NULL) {
+ if ((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+ continue;
+ }
+ pStream->getFrameDimension(str_dim);
+ if (str_dim.width > max_dim.width) {
+ max_dim.width = str_dim.width;
+ }
+ if (str_dim.height > max_dim.height) {
+ max_dim.height = str_dim.height;
+ }
+ }
+ }
+ }
+ }
+
+ for (uint8_t i = 0; i < curChannel->getNumOfStreams(); i++) {
+ QCameraStream *pStream = curChannel->getStreamByIndex(i);
+ if (pStream != NULL) {
+ if ((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+ continue;
+ }
+ pStream->getFrameDimension(str_dim);
+ if (str_dim.width > max_dim.width) {
+ max_dim.width = str_dim.width;
+ }
+ if (str_dim.height > max_dim.height) {
+ max_dim.height = str_dim.height;
+ }
+ }
+ }
+ rc = mParameters.updateRAW(max_dim);
+ return rc;
+}
+/*===========================================================================
+ * FUNCTION : addStreamToChannel
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ * @pChannel : ptr to channel obj
+ * @streamType : type of stream to be added
+ * @streamCB : callback of stream
+ * @userData : user data ptr to callback
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addStreamToChannel(QCameraChannel *pChannel,
+ cam_stream_type_t streamType,
+ stream_cb_routine streamCB,
+ void *userData)
+{
+ int32_t rc = NO_ERROR;
+
+ if (streamType == CAM_STREAM_TYPE_RAW) {
+ prepareRawStream(pChannel);
+ }
+ QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(streamType);
+ if (pStreamInfo == NULL) {
+ LOGE("no mem for stream info buf");
+ return NO_MEMORY;
+ }
+ uint8_t minStreamBufNum = getBufNumRequired(streamType);
+ bool bDynAllocBuf = false;
+ if (isZSLMode() && streamType == CAM_STREAM_TYPE_SNAPSHOT) {
+ bDynAllocBuf = true;
+ }
+
+ cam_padding_info_t padding_info;
+
+ if (streamType == CAM_STREAM_TYPE_ANALYSIS) {
+ cam_analysis_info_t analysisInfo;
+ cam_feature_mask_t featureMask;
+
+ featureMask = 0;
+ mParameters.getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+ rc = mParameters.getAnalysisInfo(
+ ((mParameters.getRecordingHintValue() == true) &&
+ mParameters.fdModeInVideo()),
+ FALSE,
+ featureMask,
+ &analysisInfo);
+ if (rc != NO_ERROR) {
+ LOGE("getAnalysisInfo failed, ret = %d", rc);
+ return rc;
+ }
+
+ padding_info = analysisInfo.analysis_padding_info;
+ } else {
+ padding_info =
+ gCamCapability[mCameraId]->padding_info;
+ if (streamType == CAM_STREAM_TYPE_PREVIEW) {
+ padding_info.width_padding = mSurfaceStridePadding;
+ padding_info.height_padding = CAM_PAD_TO_2;
+ }
+ if((!needReprocess())
+ || (streamType != CAM_STREAM_TYPE_SNAPSHOT)
+ || (!mParameters.isLLNoiseEnabled())) {
+ padding_info.offset_info.offset_x = 0;
+ padding_info.offset_info.offset_y = 0;
+ }
+ }
+
+ bool deferAllocation = needDeferred(streamType);
+ LOGD("deferAllocation = %d bDynAllocBuf = %d, stream type = %d",
+ deferAllocation, bDynAllocBuf, streamType);
+ rc = pChannel->addStream(*this,
+ pStreamInfo,
+ NULL,
+ minStreamBufNum,
+ &padding_info,
+ streamCB, userData,
+ bDynAllocBuf,
+ deferAllocation);
+
+ if (rc != NO_ERROR) {
+ LOGE("add stream type (%d) failed, ret = %d",
+ streamType, rc);
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addPreviewChannel
+ *
+ * DESCRIPTION: add a preview channel that contains a preview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addPreviewChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+
+
+ if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+ // if we had preview channel before, delete it first
+ delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
+ m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for preview channel");
+ return NO_MEMORY;
+ }
+
+ // preview only channel, don't need bundle attr and cb
+ rc = pChannel->init(NULL, NULL, NULL);
+ if (rc != NO_ERROR) {
+ LOGE("init preview channel failed, ret = %d", rc);
+ return rc;
+ }
+
+ // meta data stream always coexists with preview if applicable
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+ metadata_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add metadata stream failed, ret = %d", rc);
+ return rc;
+ }
+
+ if (isRdiMode()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+ rdi_mode_stream_cb_routine, this);
+ } else {
+ if (isNoDisplayMode()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+ nodisplay_preview_stream_cb_routine, this);
+ } else {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+ preview_stream_cb_routine, this);
+ pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+ synchronous_stream_cb_routine);
+ }
+ }
+
+ if (((mParameters.fdModeInVideo())
+ || (mParameters.getDcrf() == true)
+ || (mParameters.getRecordingHintValue() != true))
+ && (!mParameters.isSecureMode())) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+ NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("add Analysis stream failed, ret = %d", rc);
+ return rc;
+ }
+ }
+
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+ if ( raw_yuv ) {
+ rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW,
+ preview_raw_stream_cb_routine,this);
+ if ( rc != NO_ERROR ) {
+ LOGE("add raw stream failed, ret = %d", __FUNCTION__, rc);
+ delete pChannel;
+ return rc;
+ }
+ }
+
+ if (rc != NO_ERROR) {
+ LOGE("add preview stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
+ return rc;
+}
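+
+/* A note on the shape shared by the addXxxChannel() methods in this file:
+ * delete any stale channel of that type, create the channel object, init()
+ * it (passing bundle attributes and a channel callback only when its streams
+ * must be delivered as a bundle), add member streams via
+ * addStreamToChannel(), and finally publish it in m_channels[]. The skeleton
+ * below is only an illustration of that recurring pattern; "type" stands for
+ * one of the qcamera_ch_type_enum_t indices and the error codes are
+ * placeholders.
+ */
+#if 0 // illustrative sketch
+    if (m_channels[type] != NULL) {
+        delete m_channels[type];                 // drop any previous instance
+        m_channels[type] = NULL;
+    }
+    QCameraChannel *ch = new QCameraChannel(mCameraHandle->camera_handle,
+                                            mCameraHandle->ops);
+    if ((ch == NULL) || (ch->init(NULL, NULL, NULL) != NO_ERROR)) {
+        delete ch;
+        return NO_MEMORY;
+    }
+    if (addStreamToChannel(ch, CAM_STREAM_TYPE_METADATA,
+                           metadata_stream_cb_routine, this) != NO_ERROR) {
+        delete ch;
+        return UNKNOWN_ERROR;
+    }
+    m_channels[type] = ch;                       // publish the ready channel
+#endif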
+
+/*===========================================================================
+ * FUNCTION : addVideoChannel
+ *
+ * DESCRIPTION: add a video channel that contains a video stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addVideoChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraVideoChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_VIDEO] != NULL) {
+ // if we had video channel before, delete it first
+ delete m_channels[QCAMERA_CH_TYPE_VIDEO];
+ m_channels[QCAMERA_CH_TYPE_VIDEO] = NULL;
+ }
+
+ pChannel = new QCameraVideoChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for video channel");
+ return NO_MEMORY;
+ }
+
+ if (isLowPowerMode()) {
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.look_back = 0; //wait for future frame for liveshot
+ attr.post_frame_skip = mParameters.getZSLBurstInterval();
+ attr.water_mark = 1; //hold min buffers possible in Q
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+ rc = pChannel->init(&attr, snapshot_channel_cb_routine, this);
+ } else {
+ // preview only channel, don't need bundle attr and cb
+ rc = pChannel->init(NULL, NULL, NULL);
+ }
+
+ if (rc != 0) {
+ LOGE("init video channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_VIDEO,
+ video_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add video stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_VIDEO] = pChannel;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addSnapshotChannel
+ *
+ * DESCRIPTION: add a snapshot channel that contains a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ * NOTE : Add this channel for live snapshot usecase. Regular capture will
+ * use addCaptureChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addSnapshotChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_SNAPSHOT] != NULL) {
+ // if we had ZSL channel before, delete it first
+ delete m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+ m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for snapshot channel");
+ return NO_MEMORY;
+ }
+
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ attr.look_back = 0; //wait for future frame for liveshot
+ attr.post_frame_skip = mParameters.getZSLBurstInterval();
+ attr.water_mark = 1; //hold min buffers possible in Q
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+ attr.priority = MM_CAMERA_SUPER_BUF_PRIORITY_LOW;
+ rc = pChannel->init(&attr, snapshot_channel_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("init snapshot channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+ NULL, NULL);
+ if (rc != NO_ERROR) {
+ LOGE("add snapshot stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = pChannel;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addRawChannel
+ *
+ * DESCRIPTION: add a raw channel that contains a raw image stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addRawChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_RAW] != NULL) {
+ // if we had raw channel before, delete it first
+ delete m_channels[QCAMERA_CH_TYPE_RAW];
+ m_channels[QCAMERA_CH_TYPE_RAW] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for raw channel");
+ return NO_MEMORY;
+ }
+
+ if (mParameters.getofflineRAW()) {
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.look_back = mParameters.getZSLBackLookCount();
+ attr.post_frame_skip = mParameters.getZSLBurstInterval();
+ attr.water_mark = 1;
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+ rc = pChannel->init(&attr, raw_channel_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("init RAW channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ } else {
+ rc = pChannel->init(NULL, NULL, NULL);
+ if (rc != NO_ERROR) {
+ LOGE("init raw channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ }
+
+ if (!mParameters.isZSLMode()) {
+ // meta data stream always coexists with snapshot in regular RAW capture case
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+ metadata_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add metadata stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ }
+
+ if (mParameters.getofflineRAW()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+ NULL, this);
+ } else {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+ raw_stream_cb_routine, this);
+ }
+ if (rc != NO_ERROR) {
+ LOGE("add snapshot stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ m_channels[QCAMERA_CH_TYPE_RAW] = pChannel;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addZSLChannel
+ *
+ * DESCRIPTION: add a ZSL channel that contains a preview stream and
+ * a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addZSLChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraPicChannel *pChannel = NULL;
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+
+ if (m_channels[QCAMERA_CH_TYPE_ZSL] != NULL) {
+ // if we had ZSL channel before, delete it first
+ delete m_channels[QCAMERA_CH_TYPE_ZSL];
+ m_channels[QCAMERA_CH_TYPE_ZSL] = NULL;
+ }
+
+ pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for ZSL channel");
+ return NO_MEMORY;
+ }
+
+ // ZSL channel, init with bundle attr and cb
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ if (mParameters.isSceneSelectionEnabled()) {
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ } else {
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ }
+ attr.look_back = mParameters.getZSLBackLookCount();
+ attr.post_frame_skip = mParameters.getZSLBurstInterval();
+ if (mParameters.isOEMFeatEnabled()) {
+ attr.post_frame_skip++;
+ }
+ attr.water_mark = mParameters.getZSLQueueDepth();
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+ attr.user_expected_frame_id =
+ mParameters.isInstantCaptureEnabled() ? (uint8_t)mParameters.getAecFrameBoundValue() : 0;
+
+ //Enabled matched queue
+ if (isFrameSyncEnabled()) {
+ LOGH("Enabling frame sync for dual camera, camera Id: %d",
+ mCameraId);
+ attr.enable_frame_sync = 1;
+ }
+ rc = pChannel->init(&attr,
+ zsl_channel_cb,
+ this);
+ if (rc != 0) {
+ LOGE("init ZSL channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ // meta data stream always coexists with preview if applicable
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+ metadata_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add metadata stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ if (isNoDisplayMode()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+ nodisplay_preview_stream_cb_routine, this);
+ } else {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+ preview_stream_cb_routine, this);
+ pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+ synchronous_stream_cb_routine);
+ }
+ if (rc != NO_ERROR) {
+ LOGE("add preview stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+ NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("add snapshot stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ if (!mParameters.isSecureMode()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+ NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("add Analysis stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ }
+
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+ if (raw_yuv) {
+ rc = addStreamToChannel(pChannel,
+ CAM_STREAM_TYPE_RAW,
+ NULL,
+ this);
+ if (rc != NO_ERROR) {
+ LOGE("add raw stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+ }
+
+ m_channels[QCAMERA_CH_TYPE_ZSL] = pChannel;
+ return rc;
+}
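+
+/* A hedged summary of the bundle attributes set above: roughly, notify_mode
+ * selects burst vs. continuous super-buffer delivery, look_back is how far
+ * back in the ZSL queue to pick a frame, post_frame_skip is the number of
+ * frames skipped between picks, water_mark is how many super-buffers to keep
+ * queued, max_unmatched_frames is the tolerance before unmatched frames are
+ * flushed, user_expected_frame_id is the frame from which delivery is
+ * expected to start (instant capture), and enable_frame_sync matches frames
+ * across related cameras. A minimal burst-mode configuration; the numeric
+ * values are examples only:
+ */
+#if 0 // illustrative sketch
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.look_back = 2;              // example: pick a frame up to 2 back
+    attr.post_frame_skip = 1;        // example: skip 1 frame between picks
+    attr.water_mark = 2;             // example: keep 2 super-buffers queued
+    attr.max_unmatched_frames = 3;   // example tolerance before flushing
+#endif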
+
+/*===========================================================================
+ * FUNCTION : addCaptureChannel
+ *
+ * DESCRIPTION: add a capture channel that contains a snapshot stream
+ * and a postview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ * NOTE : Add this channel for regular capture usecase.
+ * For Live snapshot usecase, use addSnapshotChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCaptureChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraPicChannel *pChannel = NULL;
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+
+ if (m_channels[QCAMERA_CH_TYPE_CAPTURE] != NULL) {
+ delete m_channels[QCAMERA_CH_TYPE_CAPTURE];
+ m_channels[QCAMERA_CH_TYPE_CAPTURE] = NULL;
+ }
+
+ pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for capture channel");
+ return NO_MEMORY;
+ }
+
+ // Capture channel, only need snapshot and postview streams start together
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ if ( mLongshotEnabled ) {
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.look_back = mParameters.getZSLBackLookCount();
+ attr.water_mark = mParameters.getZSLQueueDepth();
+ } else {
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ }
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+
+ rc = pChannel->init(&attr,
+ capture_channel_cb_routine,
+ this);
+ if (rc != NO_ERROR) {
+ LOGE("init capture channel failed, ret = %d", rc);
+ return rc;
+ }
+
+ // meta data stream always coexists with snapshot in regular capture case
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+ metadata_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add metadata stream failed, ret = %d", rc);
+ return rc;
+ }
+
+ if (mLongshotEnabled) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+ preview_stream_cb_routine, this);
+
+ if (rc != NO_ERROR) {
+ LOGE("add preview stream failed, ret = %d", rc);
+ return rc;
+ }
+ pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+ synchronous_stream_cb_routine);
+ //Not adding the postview stream to the capture channel if Quadra CFA is enabled.
+ } else if (!mParameters.getQuadraCfa()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_POSTVIEW,
+ NULL, this);
+
+ if (rc != NO_ERROR) {
+ LOGE("add postview stream failed, ret = %d", rc);
+ return rc;
+ }
+ }
+
+ if (!mParameters.getofflineRAW()) {
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+ NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("add snapshot stream failed, ret = %d", rc);
+ return rc;
+ }
+ }
+
+ stream_cb_routine stream_cb = NULL;
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+
+ if (raw_yuv) {
+ stream_cb = snapshot_raw_stream_cb_routine;
+ }
+
+ if ((raw_yuv) || (mParameters.getofflineRAW())) {
+ rc = addStreamToChannel(pChannel,
+ CAM_STREAM_TYPE_RAW, stream_cb, this);
+ if (rc != NO_ERROR) {
+ LOGE("add raw stream failed, ret = %d", rc);
+ return rc;
+ }
+ }
+
+ m_channels[QCAMERA_CH_TYPE_CAPTURE] = pChannel;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addMetaDataChannel
+ *
+ * DESCRIPTION: add a meta data channel that contains a metadata stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addMetaDataChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_METADATA] != NULL) {
+ delete m_channels[QCAMERA_CH_TYPE_METADATA];
+ m_channels[QCAMERA_CH_TYPE_METADATA] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for metadata channel");
+ return NO_MEMORY;
+ }
+
+ rc = pChannel->init(NULL,
+ NULL,
+ NULL);
+ if (rc != NO_ERROR) {
+ LOGE("init metadata channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+ metadata_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add metadata stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_METADATA] = pChannel;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addCallbackChannel
+ *
+ * DESCRIPTION: add a callback channel that contains a callback stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCallbackChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL) {
+ delete m_channels[QCAMERA_CH_TYPE_CALLBACK];
+ m_channels[QCAMERA_CH_TYPE_CALLBACK] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for callback channel");
+ return NO_MEMORY;
+ }
+
+ rc = pChannel->init(NULL, NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("init callback channel failed, ret = %d",
+ rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_CALLBACK,
+ callback_stream_cb_routine, this);
+ if (rc != NO_ERROR) {
+ LOGE("add callback stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_CALLBACK] = pChannel;
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : addAnalysisChannel
+ *
+ * DESCRIPTION: add an analysis channel that contains an analysis stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addAnalysisChannel()
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = NULL;
+
+ if (m_channels[QCAMERA_CH_TYPE_ANALYSIS] != NULL) {
+ delete m_channels[QCAMERA_CH_TYPE_ANALYSIS];
+ m_channels[QCAMERA_CH_TYPE_ANALYSIS] = NULL;
+ }
+
+ pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for metadata channel");
+ return NO_MEMORY;
+ }
+
+ rc = pChannel->init(NULL, NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("init Analysis channel failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+ NULL, this);
+ if (rc != NO_ERROR) {
+ LOGE("add Analysis stream failed, ret = %d", rc);
+ delete pChannel;
+ return rc;
+ }
+
+ m_channels[QCAMERA_CH_TYPE_ANALYSIS] = pChannel;
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : getPPConfig
+ *
+ * DESCRIPTION: get post processing configuration data
+ *
+ * PARAMETERS :
+ * @pp_config : pp config structure pointer
+ * @curIndex: current pp channel index
+ * @multipass: flag indicating whether multipass processing is enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getPPConfig(cam_pp_feature_config_t &pp_config,
+ int8_t curIndex, bool multipass)
+{
+ int32_t rc = NO_ERROR;
+ int32_t feature_set = 0;
+
+ if (multipass) {
+ LOGW("Multi pass enabled. Total Pass = %d, cur index = %d",
+ mParameters.getReprocCount(), curIndex);
+ }
+
+ LOGH("Supported pproc feature mask = %llx",
+ gCamCapability[mCameraId]->qcom_supported_feature_mask);
+ cam_feature_mask_t feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
+ int32_t zoomLevel = mParameters.getParmZoomLevel();
+ uint32_t rotation = mParameters.getJpegRotation();
+ int32_t effect = mParameters.getEffectValue();
+
+ pp_config.cur_reproc_count = curIndex + 1;
+ pp_config.total_reproc_count = mParameters.getReprocCount();
+
+ //Checking what feature mask to enable
+ if (curIndex == 0) {
+ if (mParameters.getQuadraCfa()) {
+ feature_set = 2;
+ } else {
+ feature_set = 0;
+ }
+ } else if (curIndex == 1) {
+ if (mParameters.getQuadraCfa()) {
+ feature_set = 0;
+ } else {
+ feature_set = 1;
+ }
+ }
+
+ switch(feature_set) {
+ case 0:
+ //Configure feature mask for first pass of reprocessing
+ //check if any effects are enabled
+ if ((CAM_EFFECT_MODE_OFF != effect) &&
+ (feature_mask & CAM_QCOM_FEATURE_EFFECT)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_EFFECT;
+ pp_config.effect = effect;
+ }
+
+ //check for features that need to be enabled by default like sharpness
+ //(if supported by hw).
+ if ((feature_mask & CAM_QCOM_FEATURE_SHARPNESS) &&
+ !mParameters.isOptiZoomEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+ pp_config.sharpness = mParameters.getSharpness();
+ }
+
+ //check if zoom is enabled
+ if ((zoomLevel > 0) && (feature_mask & CAM_QCOM_FEATURE_CROP)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+ }
+
+ if (mParameters.isWNREnabled() &&
+ (feature_mask & CAM_QCOM_FEATURE_DENOISE2D)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+ pp_config.denoise2d.denoise_enable = 1;
+ pp_config.denoise2d.process_plates =
+ mParameters.getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+ }
+
+ if (isCACEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
+ }
+
+ //check if rotation is required
+ if ((feature_mask & CAM_QCOM_FEATURE_ROTATION) && (rotation > 0)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+ if (rotation == 0) {
+ pp_config.rotation = ROTATE_0;
+ } else if (rotation == 90) {
+ pp_config.rotation = ROTATE_90;
+ } else if (rotation == 180) {
+ pp_config.rotation = ROTATE_180;
+ } else if (rotation == 270) {
+ pp_config.rotation = ROTATE_270;
+ }
+ }
+
+ if (mParameters.isHDREnabled()){
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
+ pp_config.hdr_param.hdr_enable = 1;
+ pp_config.hdr_param.hdr_need_1x = mParameters.isHDR1xFrameEnabled();
+ pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+ pp_config.hdr_param.hdr_enable = 0;
+ }
+
+ //check if scaling is enabled
+ if ((feature_mask & CAM_QCOM_FEATURE_SCALE) &&
+ mParameters.isReprocScaleEnabled() &&
+ mParameters.isUnderReprocScaling()){
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+ mParameters.getPicSizeFromAPK(
+ pp_config.scale_param.output_width,
+ pp_config.scale_param.output_height);
+ }
+
+ if(mParameters.isUbiFocusEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_UBIFOCUS;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+ }
+
+ if(mParameters.isUbiRefocus()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_REFOCUS;
+ pp_config.misc_buf_param.misc_buffer_index = 0;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_REFOCUS;
+ }
+
+ if(mParameters.isChromaFlashEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CHROMA_FLASH;
+ pp_config.flash_value = CAM_FLASH_ON;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+ }
+
+ if(mParameters.isOptiZoomEnabled() && (0 <= zoomLevel)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_OPTIZOOM;
+ pp_config.zoom_level = (uint8_t) zoomLevel;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+ }
+
+ if (mParameters.getofflineRAW()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_RAW_PROCESSING;
+ }
+
+ if (mParameters.isTruePortraitEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_TRUEPORTRAIT;
+ pp_config.misc_buf_param.misc_buffer_index = 0;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_TRUEPORTRAIT;
+ }
+
+ if(mParameters.isStillMoreEnabled()) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_STILLMORE;
+ } else {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_STILLMORE;
+ }
+
+ if (mParameters.isOEMFeatEnabled()) {
+ pp_config.feature_mask |= CAM_OEM_FEATURE_1;
+ }
+
+ if (mParameters.getCDSMode() != CAM_CDS_MODE_OFF) {
+ if (feature_mask & CAM_QCOM_FEATURE_DSDN) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
+ } else {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CDS;
+ }
+ }
+
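+ // When two reprocess passes are used (and Quadra CFA is not active), strip
+ // rotation and CDS/DSDN from this first pass; case 1 below re-enables them
+ // so they are applied in the second pass instead.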
+ if ((multipass) &&
+ (m_postprocessor.getPPChannelCount() > 1)
+ && (!mParameters.getQuadraCfa())) {
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_PP_PASS_2;
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_DSDN;
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+ } else {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+ }
+
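+ // If the requested thumbnail size differs from the postview stream size,
+ // enable crop in reprocess so the thumbnail can be generated at the
+ // requested dimensions.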
+ cam_dimension_t thumb_src_dim;
+ cam_dimension_t thumb_dst_dim;
+ mParameters.getThumbnailSize(&(thumb_dst_dim.width), &(thumb_dst_dim.height));
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_POSTVIEW,thumb_src_dim);
+ if ((thumb_dst_dim.width != thumb_src_dim.width) ||
+ (thumb_dst_dim.height != thumb_src_dim.height)) {
+ if (thumb_dst_dim.width != 0 && thumb_dst_dim.height != 0) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+ }
+ }
+
+ break;
+
+ case 1:
+ //Configure feature mask for second pass of reprocessing
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_PASS_2;
+ if ((feature_mask & CAM_QCOM_FEATURE_ROTATION) && (rotation > 0)) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+ if (rotation == 0) {
+ pp_config.rotation = ROTATE_0;
+ } else if (rotation == 90) {
+ pp_config.rotation = ROTATE_90;
+ } else if (rotation == 180) {
+ pp_config.rotation = ROTATE_180;
+ } else if (rotation == 270) {
+ pp_config.rotation = ROTATE_270;
+ }
+ }
+ if (mParameters.getCDSMode() != CAM_CDS_MODE_OFF) {
+ if (feature_mask & CAM_QCOM_FEATURE_DSDN) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
+ } else {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CDS;
+ }
+ }
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_RAW_PROCESSING;
+ pp_config.feature_mask &= ~CAM_QCOM_FEATURE_METADATA_PROCESSING;
+ break;
+
+ case 2:
+ //Setting feature for Quadra CFA
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_QUADRA_CFA;
+ break;
+
+ }
+
+ LOGH("pproc feature mask set = %llx pass count = %d",
+ pp_config.feature_mask, curIndex);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addReprocChannel
+ *
+ * DESCRIPTION: add a reprocess channel that will do reprocess on frames
+ * coming from input channel
+ *
+ * PARAMETERS :
+ * @pInputChannel : ptr to input channel whose frames will be post-processed
+ * @cur_channel_index : Current channel index in multipass
+ *
+ * RETURN : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addReprocChannel(
+ QCameraChannel *pInputChannel, int8_t cur_channel_index)
+{
+ int32_t rc = NO_ERROR;
+ QCameraReprocessChannel *pChannel = NULL;
+ uint32_t burst_cnt = mParameters.getNumOfSnapshots();
+
+ if (pInputChannel == NULL) {
+ LOGE("input channel obj is NULL");
+ return NULL;
+ }
+
+ pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for reprocess channel");
+ return NULL;
+ }
+
+ // Capture channel: only the snapshot and postview streams need to start together
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+ rc = pChannel->init(&attr,
+ postproc_channel_cb_routine,
+ this);
+ if (rc != NO_ERROR) {
+ LOGE("init reprocess channel failed, ret = %d", rc);
+ delete pChannel;
+ return NULL;
+ }
+
+ // pp feature config
+ cam_pp_feature_config_t pp_config;
+ memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+ rc = getPPConfig(pp_config, cur_channel_index,
+ ((mParameters.getReprocCount() > 1) ? TRUE : FALSE));
+ if (rc != NO_ERROR){
+ LOGE("Error while creating PP config");
+ delete pChannel;
+ return NULL;
+ }
+
+ uint8_t minStreamBufNum = getBufNumRequired(CAM_STREAM_TYPE_OFFLINE_PROC);
+
+ //WNR and HDR happen inline. No extra buffers needed.
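+ // If HDR is combined with any other PP feature, extra input buffers are
+ // still required to hold the additional HDR input frames.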
+ cam_feature_mask_t temp_feature_mask = pp_config.feature_mask;
+ temp_feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+ if (temp_feature_mask && mParameters.isHDREnabled()) {
+ minStreamBufNum = (uint8_t)(1 + mParameters.getNumOfExtraHDRInBufsIfNeeded());
+ }
+
+ if (mParameters.isStillMoreEnabled()) {
+ cam_still_more_t stillmore_config = mParameters.getStillMoreSettings();
+ pp_config.burst_cnt = stillmore_config.burst_count;
+ LOGH("Stillmore burst %d", pp_config.burst_cnt);
+
+ // getNumOfExtraBuffersForImageProc returns one buffer less, assuming the
+ // number of captures is already accounted for. For liveshot the stillmore
+ // burst count is 1, so add one buffer back to compensate for that decrement.
+ if (mParameters.getNumOfExtraBuffersForImageProc() == 0) {
+ minStreamBufNum += 1;
+ }
+ }
+
+ if (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_3) {
+ minStreamBufNum += mParameters.getReprocCount() - 1;
+ burst_cnt = mParameters.getReprocCount();
+ if (cur_channel_index == 0) {
+ pChannel->setReprocCount(2);
+ } else {
+ pChannel->setReprocCount(1);
+ }
+ } else {
+ pChannel->setReprocCount(1);
+ }
+
+ // Add non-inplace image lib buffers only when pproc is present,
+ // because pproc is non-inplace and the image lib input buffers are
+ // the pproc output buffers, so this many extra buffers are required.
+ // If pproc is not present, the image lib input buffers come from the
+ // snapshot stream.
+ uint8_t imglib_extra_bufs = mParameters.getNumOfExtraBuffersForImageProc();
+ if (temp_feature_mask && imglib_extra_bufs) {
+ // 1 is added because getNumOfExtraBuffersForImageProc returns extra
+ // buffers assuming number of capture is already added
+ minStreamBufNum = (uint8_t)(minStreamBufNum + imglib_extra_bufs + 1);
+ }
+
+ //Mask out features that are already processed in snapshot stream.
+ cam_feature_mask_t snapshot_feature_mask = 0;
+ mParameters.getStreamPpMask(CAM_STREAM_TYPE_SNAPSHOT, snapshot_feature_mask);
+
+ pp_config.feature_mask &= ~snapshot_feature_mask;
+ LOGH("Snapshot feature mask: 0x%llx, reproc feature mask: 0x%llx",
+ snapshot_feature_mask, pp_config.feature_mask);
+
+ bool offlineReproc = isRegularCapture();
+ if (m_postprocessor.mOfflineDataBufs != NULL) {
+ offlineReproc = TRUE;
+ }
+
+ cam_padding_info_t paddingInfo = gCamCapability[mCameraId]->padding_info;
+ paddingInfo.offset_info.offset_x = 0;
+ paddingInfo.offset_info.offset_y = 0;
+ rc = pChannel->addReprocStreamsFromSource(*this,
+ pp_config,
+ pInputChannel,
+ minStreamBufNum,
+ burst_cnt,
+ &paddingInfo,
+ mParameters,
+ mLongshotEnabled,
+ offlineReproc);
+ if (rc != NO_ERROR) {
+ delete pChannel;
+ return NULL;
+ }
+
+ return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add an offline reprocess channel that contains one reproc
+ * stream, which will reprocess frames coming from external images
+ *
+ * PARAMETERS :
+ * @img_config : offline reprocess image info
+ * @pp_feature : pp feature config
+ * @stream_cb : callback routine for the reprocess stream
+ * @userdata : user data pointer passed to the stream callback
+ *
+ * RETURN : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addOfflineReprocChannel(
+ cam_pp_offline_src_config_t &img_config,
+ cam_pp_feature_config_t &pp_feature,
+ stream_cb_routine stream_cb,
+ void *userdata)
+{
+ int32_t rc = NO_ERROR;
+ QCameraReprocessChannel *pChannel = NULL;
+
+ pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+ mCameraHandle->ops);
+ if (NULL == pChannel) {
+ LOGE("no mem for reprocess channel");
+ return NULL;
+ }
+
+ rc = pChannel->init(NULL, NULL, NULL);
+ if (rc != NO_ERROR) {
+ LOGE("init reprocess channel failed, ret = %d", rc);
+ delete pChannel;
+ return NULL;
+ }
+
+ QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+ if (pStreamInfo == NULL) {
+ LOGE("no mem for stream info buf");
+ delete pChannel;
+ return NULL;
+ }
+
+ cam_stream_info_t *streamInfoBuf = (cam_stream_info_t *)pStreamInfo->getPtr(0);
+ memset(streamInfoBuf, 0, sizeof(cam_stream_info_t));
+ streamInfoBuf->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+ streamInfoBuf->fmt = img_config.input_fmt;
+ streamInfoBuf->dim = img_config.input_dim;
+ streamInfoBuf->buf_planes = img_config.input_buf_planes;
+ streamInfoBuf->streaming_mode = CAM_STREAMING_MODE_BURST;
+ streamInfoBuf->num_of_burst = img_config.num_of_bufs;
+
+ streamInfoBuf->reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+ streamInfoBuf->reprocess_config.offline = img_config;
+ streamInfoBuf->reprocess_config.pp_feature_config = pp_feature;
+
+ rc = pChannel->addStream(*this,
+ pStreamInfo, NULL, img_config.num_of_bufs,
+ &gCamCapability[mCameraId]->padding_info,
+ stream_cb, userdata, false);
+
+ if (rc != NO_ERROR) {
+ LOGE("add reprocess stream failed, ret = %d", rc);
+ delete pChannel;
+ return NULL;
+ }
+
+ return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION : addChannel
+ *
+ * DESCRIPTION: add a channel by its type
+ *
+ * PARAMETERS :
+ * @ch_type : channel type
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
+{
+ int32_t rc = UNKNOWN_ERROR;
+ switch (ch_type) {
+ case QCAMERA_CH_TYPE_ZSL:
+ rc = addZSLChannel();
+ break;
+ case QCAMERA_CH_TYPE_CAPTURE:
+ rc = addCaptureChannel();
+ break;
+ case QCAMERA_CH_TYPE_PREVIEW:
+ rc = addPreviewChannel();
+ break;
+ case QCAMERA_CH_TYPE_VIDEO:
+ rc = addVideoChannel();
+ break;
+ case QCAMERA_CH_TYPE_SNAPSHOT:
+ rc = addSnapshotChannel();
+ break;
+ case QCAMERA_CH_TYPE_RAW:
+ rc = addRawChannel();
+ break;
+ case QCAMERA_CH_TYPE_METADATA:
+ rc = addMetaDataChannel();
+ break;
+ case QCAMERA_CH_TYPE_CALLBACK:
+ rc = addCallbackChannel();
+ break;
+ case QCAMERA_CH_TYPE_ANALYSIS:
+ rc = addAnalysisChannel();
+ break;
+ default:
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : delChannel
+ *
+ * DESCRIPTION: delete a channel by its type
+ *
+ * PARAMETERS :
+ * @ch_type : channel type
+ * @destroy : delete context as well
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::delChannel(qcamera_ch_type_enum_t ch_type,
+ bool destroy)
+{
+ if (m_channels[ch_type] != NULL) {
+ if (destroy) {
+ delete m_channels[ch_type];
+ m_channels[ch_type] = NULL;
+ } else {
+ m_channels[ch_type]->deleteChannel();
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : startChannel
+ *
+ * DESCRIPTION: start a channel by its type
+ *
+ * PARAMETERS :
+ * @ch_type : channel type
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
+{
+ int32_t rc = UNKNOWN_ERROR;
+ if (m_channels[ch_type] != NULL) {
+ rc = m_channels[ch_type]->start();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stopChannel
+ *
+ * DESCRIPTION: stop a channel by its type
+ *
+ * PARAMETERS :
+ * @ch_type : channel type
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopChannel(qcamera_ch_type_enum_t ch_type)
+{
+ int32_t rc = UNKNOWN_ERROR;
+ if (m_channels[ch_type] != NULL) {
+ rc = m_channels[ch_type]->stop();
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : preparePreview
+ *
+ * DESCRIPTION: add channels needed for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::preparePreview()
+{
+ ATRACE_CALL();
+ int32_t rc = NO_ERROR;
+
+ LOGI("E");
+ rc = mParameters.setStreamConfigure(false, false, false);
+ if (rc != NO_ERROR) {
+ LOGE("setStreamConfigure failed %d", rc);
+ return rc;
+ }
+
+ if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+ rc = addChannel(QCAMERA_CH_TYPE_ZSL);
+ if (rc != NO_ERROR) {
+ LOGE("failed!! rc = %d", rc);
+ return rc;
+ }
+
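+ // When the preview stream uses the UBWC (compressed) format, a separate
+ // callback stream is added so that app preview callbacks can still be
+ // served with a linear YUV format.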
+ if (mParameters.isUBWCEnabled()) {
+ cam_format_t fmt;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW, fmt);
+ if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+ rc = addChannel(QCAMERA_CH_TYPE_CALLBACK);
+ if (rc != NO_ERROR) {
+ delChannel(QCAMERA_CH_TYPE_ZSL);
+ LOGE("failed!! rc = %d", rc);
+ return rc;
+ }
+ }
+ }
+
+ if (mParameters.getofflineRAW() && !mParameters.getQuadraCfa()) {
+ addChannel(QCAMERA_CH_TYPE_RAW);
+ }
+ } else {
+ bool recordingHint = mParameters.getRecordingHintValue();
+ if(!isRdiMode() && recordingHint) {
+ //stop face detection, longshot, etc. if turned ON in Camera mode
+#ifndef VANILLA_HAL
+ int32_t arg; //dummy arg
+ if (isLongshotEnabled()) {
+ sendCommand(CAMERA_CMD_LONGSHOT_OFF, arg, arg);
+ }
+ if (mParameters.isFaceDetectionEnabled()
+ && (!mParameters.fdModeInVideo())) {
+ sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, arg, arg);
+ }
+ if (mParameters.isHistogramEnabled()) {
+ sendCommand(CAMERA_CMD_HISTOGRAM_OFF, arg, arg);
+ }
+#endif
+ //Don't create snapshot channel for liveshot, if low power mode is set.
+ //Use video stream instead.
+ if (!isLowPowerMode()) {
+ rc = addChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ if (rc != NO_ERROR) {
+ return rc;
+ }
+ }
+
+ rc = addChannel(QCAMERA_CH_TYPE_VIDEO);
+ if (rc != NO_ERROR) {
+ delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ LOGE("failed!! rc = %d", rc);
+ return rc;
+ }
+ }
+
+ rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
+ if (!isRdiMode() && (rc != NO_ERROR)) {
+ if (recordingHint) {
+ delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ delChannel(QCAMERA_CH_TYPE_VIDEO);
+ }
+ }
+
+ if (mParameters.isUBWCEnabled() && !recordingHint) {
+ cam_format_t fmt;
+ mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW, fmt);
+ if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+ rc = addChannel(QCAMERA_CH_TYPE_CALLBACK);
+ if (rc != NO_ERROR) {
+ delChannel(QCAMERA_CH_TYPE_PREVIEW);
+ if (!isRdiMode()) {
+ delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ delChannel(QCAMERA_CH_TYPE_VIDEO);
+ }
+ LOGE("failed!! rc = %d", rc);
+ return rc;
+ }
+ }
+ }
+
+ if (NO_ERROR != rc) {
+ delChannel(QCAMERA_CH_TYPE_PREVIEW);
+ LOGE("failed!! rc = %d", rc);
+ }
+ }
+
+ LOGI("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : unpreparePreview
+ *
+ * DESCRIPTION: delete channels for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unpreparePreview()
+{
+ delChannel(QCAMERA_CH_TYPE_ZSL);
+ delChannel(QCAMERA_CH_TYPE_PREVIEW);
+ delChannel(QCAMERA_CH_TYPE_VIDEO);
+ delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+ delChannel(QCAMERA_CH_TYPE_CALLBACK);
+ delChannel(QCAMERA_CH_TYPE_RAW);
+}
+
+/*===========================================================================
+ * FUNCTION : playShutter
+ *
+ * DESCRIPTION: send request to play shutter sound
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::playShutter()
+{
+ if (mNotifyCb == NULL ||
+ msgTypeEnabledWithLock(CAMERA_MSG_SHUTTER) == 0){
+ LOGD("shutter msg not enabled or NULL cb");
+ return;
+ }
+ LOGH("CAMERA_MSG_SHUTTER ");
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_SHUTTER;
+ cbArg.ext1 = 0;
+ cbArg.ext2 = false;
+ m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION : getChannelByHandle
+ *
+ * DESCRIPTION: return a channel by its handle
+ *
+ * PARAMETERS :
+ * @channelHandle : channel handle
+ *
+ * RETURN : a channel obj if found, NULL if not found
+ *==========================================================================*/
+QCameraChannel *QCamera2HardwareInterface::getChannelByHandle(uint32_t channelHandle)
+{
+ for(int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+ if (m_channels[i] != NULL &&
+ m_channels[i]->getMyHandle() == channelHandle) {
+ return m_channels[i];
+ }
+ }
+
+ return NULL;
+}
+/*===========================================================================
+ * FUNCTION : needPreviewFDCallback
+ *
+ * DESCRIPTION: decides if a preview face detection callback is needed
+ *
+ * PARAMETERS :
+ * @num_faces : number of faces detected in the current frame
+ *
+ * RETURN : bool type of status
+ * true -- callback is needed
+ * false -- callback is not needed
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needPreviewFDCallback(uint8_t num_faces)
+{
+ if (num_faces == 0 && mNumPreviewFaces == 0) {
+ return false;
+ }
+
+ return true;
+}
+
+/*===========================================================================
+ * FUNCTION : processFaceDetectionResult
+ *
+ * DESCRIPTION: process face detection result
+ *
+ * PARAMETERS :
+ * @faces_data : ptr to face processing result struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFaceDetectionResult(cam_faces_data_t *faces_data)
+{
+ if (!mParameters.isFaceDetectionEnabled()) {
+ LOGH("FaceDetection not enabled, no ops here");
+ return NO_ERROR;
+ }
+
+ qcamera_face_detect_type_t fd_type = faces_data->detection_data.fd_type;
+ cam_face_detection_data_t *detect_data = &(faces_data->detection_data);
+ if ((NULL == mDataCb) ||
+ (fd_type == QCAMERA_FD_PREVIEW && !msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA)) ||
+ (!needPreviewFDCallback(detect_data->num_faces_detected))
+#ifndef VANILLA_HAL
+ || (fd_type == QCAMERA_FD_SNAPSHOT && !msgTypeEnabled(CAMERA_MSG_META_DATA))
+#endif
+ ) {
+ LOGH("metadata msgtype not enabled, no ops here");
+ return NO_ERROR;
+ }
+
+ if ((fd_type == QCAMERA_FD_PREVIEW) && (detect_data->update_flag == FALSE)) {
+ // Don't send callback to app if this is skipped by fd at backend
+ return NO_ERROR;
+ }
+
+ cam_dimension_t display_dim;
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_PREVIEW, display_dim);
+ if (display_dim.width <= 0 || display_dim.height <= 0) {
+ LOGE("Invalid preview width or height (%d x %d)",
+ display_dim.width, display_dim.height);
+ return UNKNOWN_ERROR;
+ }
+
+ // process face detection result
+ // preview and snapshot face detection results need separate handling
+ size_t faceResultSize = 0;
+ size_t data_len = 0;
+ if(fd_type == QCAMERA_FD_PREVIEW){
+ //fd for preview frames
+ faceResultSize = sizeof(camera_frame_metadata_t);
+ faceResultSize += sizeof(camera_face_t) * MAX_ROI;
+ }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+ // fd for snapshot frames
+ //check if face is detected in this frame
+ if(detect_data->num_faces_detected > 0){
+ data_len = sizeof(camera_frame_metadata_t) +
+ sizeof(camera_face_t) * detect_data->num_faces_detected;
+ }else{
+ //no face
+ data_len = 0;
+ }
+#endif
+ faceResultSize = 1 *sizeof(int) //meta data type
+ + 1 *sizeof(int) // meta data len
+ + data_len; //data
+ }
+
+ camera_memory_t *faceResultBuffer = mGetMemory(-1,
+ faceResultSize,
+ 1,
+ mCallbackCookie);
+ if ( NULL == faceResultBuffer ) {
+ LOGE("Not enough memory for face result data");
+ return NO_MEMORY;
+ }
+
+ unsigned char *pFaceResult = ( unsigned char * ) faceResultBuffer->data;
+ memset(pFaceResult, 0, faceResultSize);
+ unsigned char *faceData = NULL;
+ if(fd_type == QCAMERA_FD_PREVIEW){
+ faceData = pFaceResult;
+ mNumPreviewFaces = detect_data->num_faces_detected;
+ }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+ //need to fill meta data type and meta data len first
+ int *data_header = (int* )pFaceResult;
+ data_header[0] = CAMERA_META_DATA_FD;
+ data_header[1] = (int)data_len;
+
+ if(data_len <= 0){
+ //if face data is not valid or no face is present, return
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_META_DATA;
+ cbArg.data = faceResultBuffer;
+ cbArg.user_data = faceResultBuffer;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseCameraMemory;
+ int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending notification");
+ faceResultBuffer->release(faceResultBuffer);
+ }
+ return rc;
+ }
+#endif
+ faceData = pFaceResult + 2 *sizeof(int); //skip two int length
+ }
+
+ camera_frame_metadata_t *roiData = (camera_frame_metadata_t * ) faceData;
+ camera_face_t *faces = (camera_face_t *) ( faceData + sizeof(camera_frame_metadata_t) );
+
+ roiData->number_of_faces = detect_data->num_faces_detected;
+ roiData->faces = faces;
+ if (roiData->number_of_faces > 0) {
+ for (int i = 0; i < roiData->number_of_faces; i++) {
+ faces[i].id = detect_data->faces[i].face_id;
+ faces[i].score = detect_data->faces[i].score;
+
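+ // Map face coordinates from preview pixel space into the Android
+ // camera1 metadata coordinate space of [-1000, 1000].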
+ // left
+ faces[i].rect[0] = MAP_TO_DRIVER_COORDINATE(
+ detect_data->faces[i].face_boundary.left,
+ display_dim.width, 2000, -1000);
+
+ // top
+ faces[i].rect[1] = MAP_TO_DRIVER_COORDINATE(
+ detect_data->faces[i].face_boundary.top,
+ display_dim.height, 2000, -1000);
+
+ // right
+ faces[i].rect[2] = faces[i].rect[0] +
+ MAP_TO_DRIVER_COORDINATE(
+ detect_data->faces[i].face_boundary.width,
+ display_dim.width, 2000, 0);
+
+ // bottom
+ faces[i].rect[3] = faces[i].rect[1] +
+ MAP_TO_DRIVER_COORDINATE(
+ detect_data->faces[i].face_boundary.height,
+ display_dim.height, 2000, 0);
+
+ if (faces_data->landmark_valid) {
+ // Center of left eye
+ faces[i].left_eye[0] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].left_eye_center.x,
+ display_dim.width, 2000, -1000);
+ faces[i].left_eye[1] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].left_eye_center.y,
+ display_dim.height, 2000, -1000);
+
+ // Center of right eye
+ faces[i].right_eye[0] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].right_eye_center.x,
+ display_dim.width, 2000, -1000);
+ faces[i].right_eye[1] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].right_eye_center.y,
+ display_dim.height, 2000, -1000);
+
+ // Center of mouth
+ faces[i].mouth[0] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].mouth_center.x,
+ display_dim.width, 2000, -1000);
+ faces[i].mouth[1] = MAP_TO_DRIVER_COORDINATE(
+ faces_data->landmark_data.face_landmarks[i].mouth_center.y,
+ display_dim.height, 2000, -1000);
+ } else {
+ // return -2000 if invalid
+ faces[i].left_eye[0] = -2000;
+ faces[i].left_eye[1] = -2000;
+
+ faces[i].right_eye[0] = -2000;
+ faces[i].right_eye[1] = -2000;
+
+ faces[i].mouth[0] = -2000;
+ faces[i].mouth[1] = -2000;
+ }
+
+#ifndef VANILLA_HAL
+#ifdef TARGET_TS_MAKEUP
+ mFaceRect.left = detect_data->faces[i].face_boundary.left;
+ mFaceRect.top = detect_data->faces[i].face_boundary.top;
+ mFaceRect.right = detect_data->faces[i].face_boundary.width+mFaceRect.left;
+ mFaceRect.bottom = detect_data->faces[i].face_boundary.height+mFaceRect.top;
+#endif
+ if (faces_data->smile_valid) {
+ faces[i].smile_degree = faces_data->smile_data.smile[i].smile_degree;
+ faces[i].smile_score = faces_data->smile_data.smile[i].smile_confidence;
+ }
+ if (faces_data->blink_valid) {
+ faces[i].blink_detected = faces_data->blink_data.blink[i].blink_detected;
+ faces[i].leye_blink = faces_data->blink_data.blink[i].left_blink;
+ faces[i].reye_blink = faces_data->blink_data.blink[i].right_blink;
+ }
+ if (faces_data->recog_valid) {
+ faces[i].face_recognised = faces_data->recog_data.face_rec[i].face_recognised;
+ }
+ if (faces_data->gaze_valid) {
+ faces[i].gaze_angle = faces_data->gaze_data.gaze[i].gaze_angle;
+ faces[i].updown_dir = faces_data->gaze_data.gaze[i].updown_dir;
+ faces[i].leftright_dir = faces_data->gaze_data.gaze[i].leftright_dir;
+ faces[i].roll_dir = faces_data->gaze_data.gaze[i].roll_dir;
+ faces[i].left_right_gaze = faces_data->gaze_data.gaze[i].left_right_gaze;
+ faces[i].top_bottom_gaze = faces_data->gaze_data.gaze[i].top_bottom_gaze;
+ }
+#endif
+
+ }
+ }
+ else{
+#ifdef TARGET_TS_MAKEUP
+ memset(&mFaceRect,-1,sizeof(mFaceRect));
+#endif
+ }
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ if(fd_type == QCAMERA_FD_PREVIEW){
+ cbArg.msg_type = CAMERA_MSG_PREVIEW_METADATA;
+ }
+#ifndef VANILLA_HAL
+ else if(fd_type == QCAMERA_FD_SNAPSHOT){
+ cbArg.msg_type = CAMERA_MSG_META_DATA;
+ }
+#endif
+ cbArg.data = faceResultBuffer;
+ cbArg.metadata = roiData;
+ cbArg.user_data = faceResultBuffer;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseCameraMemory;
+ int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending notification");
+ faceResultBuffer->release(faceResultBuffer);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseCameraMemory
+ *
+ * DESCRIPTION: releases camera memory objects
+ *
+ * PARAMETERS :
+ * @data : buffer to be released
+ * @cookie : context data
+ * @cbStatus: callback status
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::releaseCameraMemory(void *data,
+ void */*cookie*/,
+ int32_t /*cbStatus*/)
+{
+ camera_memory_t *mem = ( camera_memory_t * ) data;
+ if ( NULL != mem ) {
+ mem->release(mem);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : returnStreamBuffer
+ *
+ * DESCRIPTION: returns back a stream buffer
+ *
+ * PARAMETERS :
+ * @data : buffer to be released
+ * @cookie : context data
+ * @cbStatus: callback status
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::returnStreamBuffer(void *data,
+ void *cookie,
+ int32_t /*cbStatus*/)
+{
+ QCameraStream *stream = ( QCameraStream * ) cookie;
+ int idx = *((int *)data);
+ if ((NULL != stream) && (0 <= idx)) {
+ stream->bufDone((uint32_t)idx);
+ } else {
+ LOGE("Cannot return buffer %d %p", idx, cookie);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : processHistogramStats
+ *
+ * DESCRIPTION: process histogram stats
+ *
+ * PARAMETERS :
+ * @stats_data : histogram stats struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHistogramStats(
+ __unused cam_hist_stats_t &stats_data)
+{
+#ifndef VANILLA_HAL
+ if (!mParameters.isHistogramEnabled()) {
+ LOGH("Histogram not enabled, no ops here");
+ return NO_ERROR;
+ }
+
+ camera_memory_t *histBuffer = mGetMemory(-1,
+ sizeof(cam_histogram_data_t),
+ 1,
+ mCallbackCookie);
+ if ( NULL == histBuffer ) {
+ LOGE("Not enough memory for histogram data");
+ return NO_MEMORY;
+ }
+
+ cam_histogram_data_t *pHistData = (cam_histogram_data_t *)histBuffer->data;
+ if (pHistData == NULL) {
+ LOGE("memory data ptr is NULL");
+ histBuffer->release(histBuffer);
+ return UNKNOWN_ERROR;
+ }
+
+ switch (stats_data.type) {
+ case CAM_HISTOGRAM_TYPE_BAYER:
+ *pHistData = stats_data.bayer_stats.gb_stats;
+ break;
+ case CAM_HISTOGRAM_TYPE_YUV:
+ *pHistData = stats_data.yuv_stats;
+ break;
+ }
+
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_STATS_DATA;
+ cbArg.data = histBuffer;
+ cbArg.user_data = histBuffer;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseCameraMemory;
+ int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending notification");
+ histBuffer->release(histBuffer);
+ }
+#endif
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : calcThermalLevel
+ *
+ * DESCRIPTION: Calculates the target fps range depending on
+ * the thermal level.
+ * Note that this function can be called from QCameraParametersIntf
+ * while mutex is held. So it should not call back into
+ * QCameraParametersIntf causing deadlock.
+ *
+ * PARAMETERS :
+ * @level : received thermal level
+ * @minFPS : minimum configured fps range
+ * @maxFPS : maximum configured fps range
+ * @minVideoFps: minimum configured video fps
+ * @maxVideoFps: maximum configured video fps
+ * @adjustedRange : target fps range
+ * @skipPattern : target skip pattern
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::calcThermalLevel(
+ qcamera_thermal_level_enum_t level,
+ const int minFPSi,
+ const int maxFPSi,
+ const float &minVideoFps,
+ const float &maxVideoFps,
+ cam_fps_range_t &adjustedRange,
+ enum msm_vfe_frame_skip_pattern &skipPattern)
+{
+ const float minFPS = (float)minFPSi;
+ const float maxFPS = (float)maxFPSi;
+
+ LOGH("level: %d, preview minfps %f, preview maxfpS %f, "
+ "video minfps %f, video maxfpS %f",
+ level, minFPS, maxFPS, minVideoFps, maxVideoFps);
+
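+ // The incoming fps values are in fps x 1000 fixed point, hence the
+ // division by 1000.0f below before applying the thermal adjustments.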
+ switch(level) {
+ case QCAMERA_THERMAL_NO_ADJUSTMENT:
+ {
+ adjustedRange.min_fps = minFPS / 1000.0f;
+ adjustedRange.max_fps = maxFPS / 1000.0f;
+ adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+ adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+ skipPattern = NO_SKIP;
+ }
+ break;
+ case QCAMERA_THERMAL_SLIGHT_ADJUSTMENT:
+ {
+ adjustedRange.min_fps = minFPS / 1000.0f;
+ adjustedRange.max_fps = maxFPS / 1000.0f;
+ adjustedRange.min_fps -= 0.1f * adjustedRange.min_fps;
+ adjustedRange.max_fps -= 0.1f * adjustedRange.max_fps;
+ adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+ adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+ adjustedRange.video_min_fps -= 0.1f * adjustedRange.video_min_fps;
+ adjustedRange.video_max_fps -= 0.1f * adjustedRange.video_max_fps;
+ if ( adjustedRange.min_fps < 1 ) {
+ adjustedRange.min_fps = 1;
+ }
+ if ( adjustedRange.max_fps < 1 ) {
+ adjustedRange.max_fps = 1;
+ }
+ if ( adjustedRange.video_min_fps < 1 ) {
+ adjustedRange.video_min_fps = 1;
+ }
+ if ( adjustedRange.video_max_fps < 1 ) {
+ adjustedRange.video_max_fps = 1;
+ }
+ skipPattern = EVERY_2FRAME;
+ }
+ break;
+ case QCAMERA_THERMAL_BIG_ADJUSTMENT:
+ {
+ adjustedRange.min_fps = minFPS / 1000.0f;
+ adjustedRange.max_fps = maxFPS / 1000.0f;
+ adjustedRange.min_fps -= 0.2f * adjustedRange.min_fps;
+ adjustedRange.max_fps -= 0.2f * adjustedRange.max_fps;
+ adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+ adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+ adjustedRange.video_min_fps -= 0.2f * adjustedRange.video_min_fps;
+ adjustedRange.video_max_fps -= 0.2f * adjustedRange.video_max_fps;
+ if ( adjustedRange.min_fps < 1 ) {
+ adjustedRange.min_fps = 1;
+ }
+ if ( adjustedRange.max_fps < 1 ) {
+ adjustedRange.max_fps = 1;
+ }
+ if ( adjustedRange.video_min_fps < 1 ) {
+ adjustedRange.video_min_fps = 1;
+ }
+ if ( adjustedRange.video_max_fps < 1 ) {
+ adjustedRange.video_max_fps = 1;
+ }
+ skipPattern = EVERY_4FRAME;
+ }
+ break;
+ case QCAMERA_THERMAL_MAX_ADJUSTMENT:
+ {
+ // Stop Preview?
+ // Set lowest min FPS for now
+ adjustedRange.min_fps = minFPS/1000.0f;
+ adjustedRange.max_fps = minFPS/1000.0f;
+ cam_capability_t *capability = gCamCapability[mCameraId];
+ for (size_t i = 0;
+ i < capability->fps_ranges_tbl_cnt;
+ i++) {
+ if (capability->fps_ranges_tbl[i].min_fps <
+ adjustedRange.min_fps) {
+ adjustedRange.min_fps =
+ capability->fps_ranges_tbl[i].min_fps;
+ adjustedRange.max_fps = adjustedRange.min_fps;
+ }
+ }
+ skipPattern = MAX_SKIP;
+ adjustedRange.video_min_fps = adjustedRange.min_fps;
+ adjustedRange.video_max_fps = adjustedRange.max_fps;
+ }
+ break;
+ case QCAMERA_THERMAL_SHUTDOWN:
+ {
+ // send error notify
+ LOGE("Received shutdown thermal level. Closing camera");
+ sendEvtNotify(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, 0);
+ }
+ break;
+ default:
+ {
+ LOGW("Invalid thermal level %d", level);
+ return BAD_VALUE;
+ }
+ break;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : recalcFPSRange
+ *
+ * DESCRIPTION: adjust the configured fps range regarding
+ * the last thermal level.
+ *
+ * PARAMETERS :
+ * @minFPS : minimum configured fps range
+ * @maxFPS : maximum configured fps range
+ * @minVideoFPS : minimum configured video fps
+ * @maxVideoFPS : maximum configured video fps
+ * @adjustedRange : target fps range
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::recalcFPSRange(int &minFPS, int &maxFPS,
+ const float &minVideoFPS, const float &maxVideoFPS,
+ cam_fps_range_t &adjustedRange)
+{
+ enum msm_vfe_frame_skip_pattern skipPattern;
+ calcThermalLevel(mThermalLevel,
+ minFPS,
+ maxFPS,
+ minVideoFPS,
+ maxVideoFPS,
+ adjustedRange,
+ skipPattern);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : updateThermalLevel
+ *
+ * DESCRIPTION: update thermal level depending on thermal events
+ *
+ * PARAMETERS :
+ * @thermal_level : ptr to thermal level (qcamera_thermal_level_enum_t)
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateThermalLevel(void *thermal_level)
+{
+ int ret = NO_ERROR;
+ cam_fps_range_t adjustedRange;
+ int minFPS, maxFPS;
+ float minVideoFPS, maxVideoFPS;
+ enum msm_vfe_frame_skip_pattern skipPattern;
+ qcamera_thermal_level_enum_t level = *(qcamera_thermal_level_enum_t *)thermal_level;
+
+
+ if (!mCameraOpened) {
+ LOGH("Camera is not opened, no need to update camera parameters");
+ return NO_ERROR;
+ }
+ if (mParameters.getRecordingHintValue()) {
+ LOGH("Thermal mitigation isn't enabled in camcorder mode");
+ return NO_ERROR;
+ }
+
+ mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+ qcamera_thermal_mode thermalMode = mParameters.getThermalMode();
+ if (mParameters.isHfrMode()) {
+ cam_fps_range_t hfrFpsRange;
+ mParameters.getHfrFps(hfrFpsRange);
+ minVideoFPS = hfrFpsRange.video_min_fps;
+ maxVideoFPS = hfrFpsRange.video_max_fps;
+ } else {
+ minVideoFPS = minFPS;
+ maxVideoFPS = maxFPS;
+ }
+
+ calcThermalLevel(level, minFPS, maxFPS, minVideoFPS, maxVideoFPS,
+ adjustedRange, skipPattern);
+ mThermalLevel = level;
+
+ if (thermalMode == QCAMERA_THERMAL_ADJUST_FPS)
+ ret = mParameters.adjustPreviewFpsRange(&adjustedRange);
+ else if (thermalMode == QCAMERA_THERMAL_ADJUST_FRAMESKIP)
+ ret = mParameters.setFrameSkip(skipPattern);
+ else
+ LOGW("Incorrect thermal mode %d", thermalMode);
+
+ return ret;
+
+}
+
+/*===========================================================================
+ * FUNCTION : updateParameters
+ *
+ * DESCRIPTION: update parameters
+ *
+ * PARAMETERS :
+ * @parms : input parameters string
+ * @needRestart : output, flag to indicate if preview restart is needed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateParameters(const char *parms, bool &needRestart)
+{
+ int rc = NO_ERROR;
+
+ String8 str = String8(parms);
+ rc = mParameters.updateParameters(str, needRestart);
+ setNeedRestart(needRestart);
+
+ // update stream based parameter settings
+ for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+ if (m_channels[i] != NULL) {
+ m_channels[i]->UpdateStreamBasedParameters(mParameters);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : commitParameterChanges
+ *
+ * DESCRIPTION: commit parameter changes to the backend to take effect
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ * NOTE : This function must be called after updateParameters.
+ * Otherwise, no change will be passed to backend to take effect.
+ *==========================================================================*/
+int QCamera2HardwareInterface::commitParameterChanges()
+{
+ int rc = NO_ERROR;
+ rc = mParameters.commitParameters();
+ if (rc == NO_ERROR) {
+ // update number of snapshot based on committed parameters setting
+ rc = mParameters.setNumOfSnapshot();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : needDebugFps
+ *
+ * DESCRIPTION: if fps log info needs to be printed out
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: need print out fps log
+ * false: no need to print out fps log
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDebugFps()
+{
+ bool needFps = false;
+ needFps = mParameters.isFpsDebugEnabled();
+ return needFps;
+}
+
+/*===========================================================================
+ * FUNCTION : isCACEnabled
+ *
+ * DESCRIPTION: if CAC is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCACEnabled()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.feature.cac", prop, "0");
+ int enableCAC = atoi(prop);
+ return enableCAC == 1;
+}
+
+/*===========================================================================
+ * FUNCTION : is4k2kResolution
+ *
+ * DESCRIPTION: check if resolution is 4K UHD (3840x2160) or true 4K (4096x2160)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::is4k2kResolution(cam_dimension_t* resolution)
+{
+ bool enabled = false;
+ if ((resolution->width == 4096 && resolution->height == 2160) ||
+ (resolution->width == 3840 && resolution->height == 2160) ) {
+ enabled = true;
+ }
+ return enabled;
+}
+
+/*===========================================================================
+ * FUNCTION : isPreviewRestartEnabled
+ *
+ * DESCRIPTION: Check whether preview should be restarted automatically
+ * during image capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isPreviewRestartEnabled()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.feature.restart", prop, "0");
+ int earlyRestart = atoi(prop);
+ return earlyRestart == 1;
+}
+
+/*===========================================================================
+ * FUNCTION : needReprocess
+ *
+ * DESCRIPTION: if reprocess is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needReprocess()
+{
+ bool needReprocess = false;
+
+ if (!mParameters.isJpegPictureFormat() &&
+ !mParameters.isNV21PictureFormat()) {
+ // RAW image, no need to reprocess
+ return false;
+ }
+
+ //Disable reprocess for 4K liveshot case, unless low power mode is set
+ if (mParameters.is4k2kVideoResolution() && mParameters.getRecordingHintValue()
+ && !isLowPowerMode()) {
+ return false;
+ }
+
+ // pp feature config
+ cam_pp_feature_config_t pp_config;
+ memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+ //Decide whether to do reprocess or not based on
+ //ppconfig obtained in the first pass.
+ getPPConfig(pp_config);
+
+ if (pp_config.feature_mask > 0) {
+ needReprocess = true;
+ }
+
+ LOGH("needReprocess %s", needReprocess ? "true" : "false");
+ return needReprocess;
+}
+
+
+/*===========================================================================
+ * FUNCTION : needRotationReprocess
+ *
+ * DESCRIPTION: if rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needRotationReprocess()
+{
+ if (!mParameters.isJpegPictureFormat() &&
+ !mParameters.isNV21PictureFormat()) {
+ // RAW image, no need to reprocess
+ return false;
+ }
+
+ //Disable reprocess for 4K liveshot case, unless low power mode is set
+ if (mParameters.is4k2kVideoResolution() && mParameters.getRecordingHintValue()
+ && !isLowPowerMode()) {
+ return false;
+ }
+
+ if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_ROTATION) > 0 &&
+ (mParameters.getJpegRotation() > 0)) {
+ // current rotation is not zero, and pp has the capability to process rotation
+ LOGH("need to do reprocess for rotation=%d",
+ mParameters.getJpegRotation());
+ return true;
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : getThumbnailSize
+ *
+ * DESCRIPTION: get user set thumbnail size
+ *
+ * PARAMETERS :
+ * @dim : output of thumbnail dimension
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::getThumbnailSize(cam_dimension_t &dim)
+{
+ mParameters.getThumbnailSize(&dim.width, &dim.height);
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegQuality
+ *
+ * DESCRIPTION: get user set jpeg quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : jpeg quality setting
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::getJpegQuality()
+{
+ uint32_t quality = 0;
+ quality = mParameters.getJpegQuality();
+ return quality;
+}
+
+/*===========================================================================
+ * FUNCTION : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : exif data from user setting and GPS
+ *==========================================================================*/
+QCameraExif *QCamera2HardwareInterface::getExifData()
+{
+ QCameraExif *exif = new QCameraExif();
+ if (exif == NULL) {
+ LOGE("No memory for QCameraExif");
+ return NULL;
+ }
+
+ int32_t rc = NO_ERROR;
+
+ // add exif entries
+ String8 dateTime, subSecTime;
+ rc = mParameters.getExifDateTime(dateTime, subSecTime);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_DATE_TIME, EXIF_ASCII,
+ (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+ exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+ (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+ exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
+ (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+ exif->addEntry(EXIFTAGID_SUBSEC_TIME, EXIF_ASCII,
+ (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+ exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, EXIF_ASCII,
+ (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+ exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, EXIF_ASCII,
+ (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+ } else {
+ LOGW("getExifDateTime failed");
+ }
+
+ rat_t focalLength;
+ rc = mParameters.getExifFocalLength(&focalLength);
+ if (rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+ EXIF_RATIONAL,
+ 1,
+ (void *)&(focalLength));
+ } else {
+ LOGW("getExifFocalLength failed");
+ }
+
+ uint16_t isoSpeed = mParameters.getExifIsoSpeed();
+ if (getSensorType() != CAM_SENSOR_YUV) {
+ exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
+ EXIF_SHORT,
+ 1,
+ (void *)&(isoSpeed));
+ }
+
+ char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+ uint32_t count = 0;
+
+ /*gps data might not be available */
+ rc = mParameters.getExifGpsProcessingMethod(gpsProcessingMethod, count);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+ EXIF_ASCII,
+ count,
+ (void *)gpsProcessingMethod);
+ } else {
+ LOGW("getExifGpsProcessingMethod failed");
+ }
+
+ rat_t latitude[3];
+ char latRef[2];
+ rc = mParameters.getExifLatitude(latitude, latRef);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+ EXIF_RATIONAL,
+ 3,
+ (void *)latitude);
+ exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+ EXIF_ASCII,
+ 2,
+ (void *)latRef);
+ } else {
+ LOGW("getExifLatitude failed");
+ }
+
+ rat_t longitude[3];
+ char lonRef[2];
+ rc = mParameters.getExifLongitude(longitude, lonRef);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+ EXIF_RATIONAL,
+ 3,
+ (void *)longitude);
+
+ exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+ EXIF_ASCII,
+ 2,
+ (void *)lonRef);
+ } else {
+ LOGW("getExifLongitude failed");
+ }
+
+ rat_t altitude;
+ char altRef;
+ rc = mParameters.getExifAltitude(&altitude, &altRef);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+ EXIF_RATIONAL,
+ 1,
+ (void *)&(altitude));
+
+ exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+ EXIF_BYTE,
+ 1,
+ (void *)&altRef);
+ } else {
+ LOGW("getExifAltitude failed");
+ }
+
+ char gpsDateStamp[20];
+ rat_t gpsTimeStamp[3];
+ rc = mParameters.getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp);
+ if(rc == NO_ERROR) {
+ exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
+ EXIF_ASCII,
+ (uint32_t)(strlen(gpsDateStamp) + 1),
+ (void *)gpsDateStamp);
+
+ exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+ EXIF_RATIONAL,
+ 3,
+ (void *)gpsTimeStamp);
+ } else {
+ LOGW("getExifGpsDataTimeStamp failed");
+ }
+
+#ifdef ENABLE_MODEL_INFO_EXIF
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.exif.make", value, "") > 0 ||
+ property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+ exif->addEntry(EXIFTAGID_MAKE,
+ EXIF_ASCII, strlen(value) + 1, (void *)value);
+ } else {
+ LOGW("getExifMaker failed");
+ }
+
+ if (property_get("persist.sys.exif.model", value, "") > 0 ||
+ property_get("ro.product.model", value, "QCAM-AA") > 0) {
+ exif->addEntry(EXIFTAGID_MODEL,
+ EXIF_ASCII, strlen(value) + 1, (void *)value);
+ } else {
+ LOGW("getExifModel failed");
+ }
+
+ if (property_get("ro.build.description", value, "QCAM-AA") > 0) {
+ exif->addEntry(EXIFTAGID_SOFTWARE, EXIF_ASCII,
+ (uint32_t)(strlen(value) + 1), (void *)value);
+ } else {
+ LOGW("getExifSoftware failed");
+ }
+
+#endif
+
+ if (mParameters.useJpegExifRotation()) {
+ int16_t orientation;
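+ // Map the JPEG rotation to standard EXIF orientation codes:
+ // 1 = 0 degrees, 6 = 90 degrees CW, 3 = 180 degrees, 8 = 270 degrees CW.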
+ switch (mParameters.getJpegExifRotation()) {
+ case 0:
+ orientation = 1;
+ break;
+ case 90:
+ orientation = 6;
+ break;
+ case 180:
+ orientation = 3;
+ break;
+ case 270:
+ orientation = 8;
+ break;
+ default:
+ orientation = 1;
+ break;
+ }
+ exif->addEntry(EXIFTAGID_ORIENTATION,
+ EXIF_SHORT,
+ 1,
+ (void *)&orientation);
+ exif->addEntry(EXIFTAGID_TN_ORIENTATION,
+ EXIF_SHORT,
+ 1,
+ (void *)&orientation);
+ }
+
+ return exif;
+}
+
+/*===========================================================================
+ * FUNCTION : setHistogram
+ *
+ * DESCRIPTION: set if histogram should be enabled
+ *
+ * PARAMETERS :
+ * @histogram_en : bool flag if histogram should be enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setHistogram(bool histogram_en)
+{
+ return mParameters.setHistogram(histogram_en);
+}
+
+/*===========================================================================
+ * FUNCTION : setFaceDetection
+ *
+ * DESCRIPTION: set if face detection should be enabled
+ *
+ * PARAMETERS :
+ * @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFaceDetection(bool enabled)
+{
+ return mParameters.setFaceDetection(enabled, true);
+}
+
+/*===========================================================================
+ * FUNCTION : isCaptureShutterEnabled
+ *
+ * DESCRIPTION: Check whether shutter should be triggered immediately after
+ * capture
+ *
+ * PARAMETERS :
+ *
+ * RETURN : true - regular capture
+ * false - other type of capture
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCaptureShutterEnabled()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.feature.shutter", prop, "0");
+ int enableShutter = atoi(prop);
+ return enableShutter == 1;
+}
+
+/*===========================================================================
+ * FUNCTION : needProcessPreviewFrame
+ *
+ * DESCRIPTION: returns whether a preview frame needs to be displayed
+ *
+ * PARAMETERS :
+ * @frameID : frameID of frame to be processed
+ *
+ * RETURN : bool type of status
+ * true -- preview frame needs to be processed
+ * false -- preview frame can be skipped
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needProcessPreviewFrame(uint32_t frameID)
+{
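+ // A preview frame is processed only when preview is running, the frame is
+ // outside the display-skip range and instant AEC is not active; when the
+ // preview-restart feature is enabled, frames are processed regardless.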
+ return (((m_stateMachine.isPreviewRunning()) &&
+ (!isDisplayFrameToSkip(frameID)) &&
+ (!mParameters.isInstantAECEnabled())) ||
+ (isPreviewRestartEnabled()));
+}
+
+/*===========================================================================
+ * FUNCTION : needSendPreviewCallback
+ *
+ * DESCRIPTION: returns whether preview frame need to callback to APP
+ *
+ * PARAMETERS :
+ *
+ * RETURN : true - need preview frame callback
+ * false - not send preview frame callback
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needSendPreviewCallback()
+{
+ return m_stateMachine.isPreviewRunning()
+ && (mDataCb != NULL)
+ && (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)
+ && m_stateMachine.isPreviewCallbackNeeded();
+}
+
+/*===========================================================================
+ * FUNCTION : setDisplaySkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ * @enabled : TRUE to start skipping frame to display
+ FALSE to stop skipping frame to display
+ * @skipCnt : Number of frame to skip. 0 by default
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplaySkip(bool enabled, uint8_t skipCnt)
+{
+ pthread_mutex_lock(&mGrallocLock);
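+ // When enabling, start skipping display frames from (last preview frame +
+ // skipCnt + 1) onward; when disabling, close the skip range at that frame
+ // so frames after it are displayed again.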
+ if (enabled) {
+ setDisplayFrameSkip();
+ setDisplayFrameSkip(mLastPreviewFrameID + skipCnt + 1);
+ } else {
+ setDisplayFrameSkip(mFrameSkipStart, (mLastPreviewFrameID + skipCnt + 1));
+ }
+ pthread_mutex_unlock(&mGrallocLock);
+}
+
+/*===========================================================================
+ * FUNCTION : setDisplayFrameSkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ * @start : frameId to start skip
+ * @end : frameId to stop skip
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplayFrameSkip(uint32_t start,
+ uint32_t end)
+{
+ if (start == 0) {
+ mFrameSkipStart = 0;
+ mFrameSkipEnd = 0;
+ return;
+ }
+ if ((mFrameSkipStart == 0) || (mFrameSkipStart > start)) {
+ mFrameSkipStart = start;
+ }
+ if ((end == 0) || (end > mFrameSkipEnd)) {
+ mFrameSkipEnd = end;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : isDisplayFrameToSkip
+ *
+ * DESCRIPTION: function to determine if input frame falls within the skip range
+ *
+ * PARAMETERS :
+ * @frameId : frameId to verify
+ *
+ * RETURN : true : need to skip
+ * false: no need to skip
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isDisplayFrameToSkip(uint32_t frameId)
+{
+ return ((mFrameSkipStart != 0) && (frameId >= mFrameSkipStart) &&
+ (frameId <= mFrameSkipEnd || mFrameSkipEnd == 0)) ? TRUE : FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION : prepareHardwareForSnapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot, such as LED
+ *
+ * PARAMETERS :
+ * @afNeeded: flag indicating if Auto Focus needs to be done during preparation
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::prepareHardwareForSnapshot(int32_t afNeeded)
+{
+ ATRACE_CALL();
+ LOGI("[KPI Perf]: Send PREPARE SANSPHOT event");
+ return mCameraHandle->ops->prepare_snapshot(mCameraHandle->camera_handle,
+ afNeeded);
+}
+
+/*===========================================================================
+ * FUNCTION : needFDMetadata
+ *
+ * DESCRIPTION: check whether we need to process Face Detection metadata in this channel
+ *
+ * PARAMETERS :
+ * @channel_type: channel type
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needFDMetadata(qcamera_ch_type_enum_t channel_type)
+{
+ //Note: Currently we only process ZSL channel
+ bool value = false;
+ if(channel_type == QCAMERA_CH_TYPE_ZSL){
+ //check if FD requirement is enabled
+ if(mParameters.isSnapshotFDNeeded() &&
+ mParameters.isFaceDetectionEnabled()){
+ value = true;
+ LOGH("Face Detection metadata is required in ZSL mode.");
+ }
+ }
+
+ return value;
+}
+
+/*===========================================================================
+ * FUNCTION : deferredWorkRoutine
+ *
+ * DESCRIPTION: data process routine that executes deferred tasks
+ *
+ * PARAMETERS :
+ * @data : user data ptr (QCamera2HardwareInterface)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void *QCamera2HardwareInterface::deferredWorkRoutine(void *obj)
+{
+ int running = 1;
+ int ret;
+ uint8_t is_active = FALSE;
+ int32_t job_status = 0;
+
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)obj;
+ QCameraCmdThread *cmdThread = &pme->mDeferredWorkThread;
+ cmdThread->setName("CAM_defrdWrk");
+
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ LOGD("cmd: %d", cmd);
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_START_DATA_PROC:
+ LOGH("start data proc");
+ is_active = TRUE;
+ break;
+ case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+ LOGH("stop data proc");
+ is_active = FALSE;
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+ break;
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ DefWork *dw =
+ reinterpret_cast<DefWork *>(pme->mCmdQueue.dequeue());
+
+ if ( NULL == dw ) {
+ LOGE("Invalid deferred work");
+ break;
+ }
+
+ switch( dw->cmd ) {
+ case CMD_DEF_ALLOCATE_BUFF:
+ {
+ QCameraChannel * pChannel = dw->args.allocArgs.ch;
+
+ if ( NULL == pChannel ) {
+ LOGE("Invalid deferred work channel");
+ job_status = BAD_VALUE;
+ break;
+ }
+
+ cam_stream_type_t streamType = dw->args.allocArgs.type;
+ LOGH("Deferred buffer allocation started for stream type: %d",
+ streamType);
+
+ uint32_t iNumOfStreams = pChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ for ( uint32_t i = 0; i < iNumOfStreams; ++i) {
+ pStream = pChannel->getStreamByIndex(i);
+
+ if ( NULL == pStream ) {
+ job_status = BAD_VALUE;
+ break;
+ }
+
+ if ( pStream->isTypeOf(streamType)) {
+ if ( pStream->allocateBuffers() ) {
+ LOGE("Error allocating buffers !!!");
+ job_status = NO_MEMORY;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ }
+ break;
+ }
+ }
+ }
+ break;
+ case CMD_DEF_PPROC_START:
+ {
+ int32_t ret = pme->getDefJobStatus(pme->mInitPProcJob);
+ if (ret != NO_ERROR) {
+ job_status = ret;
+ LOGE("PPROC Start failed");
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+ QCameraChannel * pChannel = dw->args.pprocArgs;
+ assert(pChannel);
+
+ if (pme->m_postprocessor.start(pChannel) != NO_ERROR) {
+ LOGE("cannot start postprocessor");
+ job_status = BAD_VALUE;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ }
+ }
+ break;
+ case CMD_DEF_METADATA_ALLOC:
+ {
+ int32_t ret = pme->getDefJobStatus(pme->mParamAllocJob);
+ if (ret != NO_ERROR) {
+ job_status = ret;
+ LOGE("Metadata alloc failed");
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+ pme->mMetadataMem = new QCameraMetadataStreamMemory(
+ QCAMERA_ION_USE_CACHE);
+
+ if (pme->mMetadataMem == NULL) {
+ LOGE("Unable to allocate metadata buffers");
+ job_status = BAD_VALUE;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ } else {
+ int32_t rc = pme->mMetadataMem->allocate(
+ dw->args.metadataAllocArgs.bufferCnt,
+ dw->args.metadataAllocArgs.size,
+ NON_SECURE);
+ if (rc < 0) {
+ delete pme->mMetadataMem;
+ pme->mMetadataMem = NULL;
+ }
+ }
+ }
+ break;
+ case CMD_DEF_CREATE_JPEG_SESSION:
+ {
+ QCameraChannel * pChannel = dw->args.pprocArgs;
+ assert(pChannel);
+
+ int32_t ret = pme->getDefJobStatus(pme->mReprocJob);
+ if (ret != NO_ERROR) {
+ job_status = ret;
+ LOGE("Jpeg create failed");
+ break;
+ }
+
+ if (pme->m_postprocessor.createJpegSession(pChannel)
+ != NO_ERROR) {
+ LOGE("cannot create JPEG session");
+ job_status = UNKNOWN_ERROR;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ }
+ }
+ break;
+ case CMD_DEF_PPROC_INIT:
+ {
+ int32_t rc = NO_ERROR;
+
+ jpeg_encode_callback_t jpegEvtHandle =
+ dw->args.pprocInitArgs.jpeg_cb;
+ void* user_data = dw->args.pprocInitArgs.user_data;
+ QCameraPostProcessor *postProcessor =
+ &(pme->m_postprocessor);
+ uint32_t cameraId = pme->mCameraId;
+ cam_capability_t *capability =
+ gCamCapability[cameraId];
+ cam_padding_info_t padding_info;
+ cam_padding_info_t& cam_capability_padding_info =
+ capability->padding_info;
+
+ if(!pme->mJpegClientHandle) {
+ rc = pme->initJpegHandle();
+ if (rc != NO_ERROR) {
+ LOGE("Error!! creating JPEG handle failed");
+ job_status = UNKNOWN_ERROR;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+ }
+ LOGH("mJpegClientHandle : %d", pme->mJpegClientHandle);
+
+ rc = postProcessor->setJpegHandle(&pme->mJpegHandle,
+ &pme->mJpegMpoHandle,
+ pme->mJpegClientHandle);
+ if (rc != 0) {
+ LOGE("Error!! set JPEG handle failed");
+ job_status = UNKNOWN_ERROR;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+
+ /* get max pic size for jpeg work buf calculation*/
+ rc = postProcessor->init(jpegEvtHandle, user_data);
+
+ if (rc != NO_ERROR) {
+ LOGE("cannot init postprocessor");
+ job_status = UNKNOWN_ERROR;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+
+ // update padding info from jpeg
+ postProcessor->getJpegPaddingReq(padding_info);
+ if (cam_capability_padding_info.width_padding <
+ padding_info.width_padding) {
+ cam_capability_padding_info.width_padding =
+ padding_info.width_padding;
+ }
+ if (cam_capability_padding_info.height_padding <
+ padding_info.height_padding) {
+ cam_capability_padding_info.height_padding =
+ padding_info.height_padding;
+ }
+ if (cam_capability_padding_info.plane_padding !=
+ padding_info.plane_padding) {
+ cam_capability_padding_info.plane_padding =
+ mm_stream_calc_lcm(
+ cam_capability_padding_info.plane_padding,
+ padding_info.plane_padding);
+ }
+ if (cam_capability_padding_info.offset_info.offset_x
+ != padding_info.offset_info.offset_x) {
+ cam_capability_padding_info.offset_info.offset_x =
+ mm_stream_calc_lcm (
+ cam_capability_padding_info.offset_info.offset_x,
+ padding_info.offset_info.offset_x);
+ }
+ if (cam_capability_padding_info.offset_info.offset_y
+ != padding_info.offset_info.offset_y) {
+ cam_capability_padding_info.offset_info.offset_y =
+ mm_stream_calc_lcm (
+ cam_capability_padding_info.offset_info.offset_y,
+ padding_info.offset_info.offset_y);
+ }
+ }
+ break;
+ case CMD_DEF_PARAM_ALLOC:
+ {
+ int32_t rc = pme->mParameters.allocate();
+ // The notify routine is not initialized yet at this point,
+ // so just record the error in the job status
+ if (rc != NO_ERROR) {
+ job_status = rc;
+ LOGE("Param allocation failed");
+ break;
+ }
+ }
+ break;
+ case CMD_DEF_PARAM_INIT:
+ {
+ int32_t rc = pme->getDefJobStatus(pme->mParamAllocJob);
+ if (rc != NO_ERROR) {
+ job_status = rc;
+ LOGE("Param init failed");
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+
+ uint32_t camId = pme->mCameraId;
+ cam_capability_t * cap = gCamCapability[camId];
+
+ if (pme->mCameraHandle == NULL) {
+ LOGE("Camera handle is null");
+ job_status = BAD_VALUE;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+
+ // PostProc needs calibration data at initialization
+ // time for jpeg_open, and calibration data is currently
+ // fetched via a get param, so params need to be
+ // initialized before postproc init
+ rc = pme->mParameters.init(cap,
+ pme->mCameraHandle,
+ pme);
+ if (rc != 0) {
+ job_status = UNKNOWN_ERROR;
+ LOGE("Parameter Initialization failed");
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+
+ // Get related cam calibration only in
+ // dual camera mode
+ if (pme->getRelatedCamSyncInfo()->sync_control ==
+ CAM_SYNC_RELATED_SENSORS_ON) {
+ rc = pme->mParameters.getRelatedCamCalibration(
+ &(pme->mJpegMetadata.otp_calibration_data));
+ LOGD("Dumping Calibration Data Version Id %f rc %d",
+ pme->mJpegMetadata.otp_calibration_data.calibration_format_version,
+ rc);
+ if (rc != 0) {
+ job_status = UNKNOWN_ERROR;
+ LOGE("getRelatedCamCalibration failed");
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+ pme->m_bRelCamCalibValid = true;
+ }
+
+ pme->mJpegMetadata.sensor_mount_angle =
+ cap->sensor_mount_angle;
+ pme->mJpegMetadata.default_sensor_flip = FLIP_NONE;
+
+ pme->mParameters.setMinPpMask(
+ cap->qcom_supported_feature_mask);
+ pme->mExifParams.debug_params =
+ (mm_jpeg_debug_exif_params_t *)
+ malloc(sizeof(mm_jpeg_debug_exif_params_t));
+ if (!pme->mExifParams.debug_params) {
+ LOGE("Out of Memory. Allocation failed for "
+ "3A debug exif params");
+ job_status = NO_MEMORY;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_UNKNOWN, 0);
+ break;
+ }
+ memset(pme->mExifParams.debug_params, 0,
+ sizeof(mm_jpeg_debug_exif_params_t));
+ }
+ break;
+ case CMD_DEF_GENERIC:
+ {
+ BackgroundTask *bgTask = dw->args.genericArgs;
+ job_status = bgTask->bgFunction(bgTask->bgArgs);
+ }
+ break;
+ default:
+ LOGE("Incorrect command : %d", dw->cmd);
+ }
+
+ pme->dequeueDeferredWork(dw, job_status);
+ }
+ break;
+ case CAMERA_CMD_TYPE_EXIT:
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ } while (running);
+
+ return NULL;
+}
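+
+/*===========================================================================
+ * NOTE : Overview of the deferred work flow (for illustration; derived from
+ *        the code in this file): queueDeferredWork() enqueues a DefWork into
+ *        mCmdQueue, books a slot in mDefOngoingJobs[] and wakes
+ *        deferredWorkRoutine() with CAMERA_CMD_TYPE_DO_NEXT_JOB. The routine
+ *        runs the matching CMD_DEF_* case and then calls
+ *        dequeueDeferredWork(), which records the job status (or clears the
+ *        slot on success) and broadcasts mDefCond, so that
+ *        waitDeferredWork()/getDefJobStatus() callers can pick up the result.
+ *==========================================================================*/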
+
+/*===========================================================================
+ * FUNCTION : queueDeferredWork
+ *
+ * DESCRIPTION: function which queues deferred tasks
+ *
+ * PARAMETERS :
+ * @cmd : deferred task
+ * @args : deferred task arguments
+ *
+ * RETURN : job id of deferred job
+ * : 0 in case of error
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::queueDeferredWork(DeferredWorkCmd cmd,
+ DeferWorkArgs args)
+{
+ Mutex::Autolock l(mDefLock);
+ for (int32_t i = 0; i < MAX_ONGOING_JOBS; ++i) {
+ if (mDefOngoingJobs[i].mDefJobId == 0) {
+ DefWork *dw = new DefWork(cmd, sNextJobId, args);
+ if (!dw) {
+ LOGE("out of memory.");
+ return 0;
+ }
+ if (mCmdQueue.enqueue(dw)) {
+ mDefOngoingJobs[i].mDefJobId = sNextJobId++;
+ mDefOngoingJobs[i].mDefJobStatus = 0;
+ if (sNextJobId == 0) { // handle overflow
+ sNextJobId = 1;
+ }
+ mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB,
+ FALSE,
+ FALSE);
+ return mDefOngoingJobs[i].mDefJobId;
+ } else {
+ LOGD("Command queue not active! cmd = %d", cmd);
+ delete dw;
+ return 0;
+ }
+ }
+ }
+ return 0;
+}
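+
+/*===========================================================================
+ * Usage sketch (illustrative only, not a call site from the original code):
+ * a member of this class would typically defer a buffer allocation and later
+ * wait on it as shown below; "pChannel" is a hypothetical channel pointer:
+ *
+ *   DeferWorkArgs args;
+ *   memset(&args, 0, sizeof(DeferWorkArgs));
+ *   args.allocArgs.ch = pChannel;
+ *   args.allocArgs.type = CAM_STREAM_TYPE_SNAPSHOT;
+ *   uint32_t jobId = queueDeferredWork(CMD_DEF_ALLOCATE_BUFF, args);
+ *   if (jobId == 0) {
+ *       LOGE("Failed to queue deferred allocation");
+ *   } else if (waitDeferredWork(jobId) != NO_ERROR) {
+ *       LOGE("Deferred allocation failed");
+ *   }
+ *==========================================================================*/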
+
+/*===========================================================================
+ * FUNCTION : initJpegHandle
+ *
+ * DESCRIPTION: Opens JPEG client and gets a handle.
+ * Sends Dual cam calibration info if present
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::initJpegHandle() {
+ // Check if JPEG client handle is present
+ LOGH("E");
+ if(!mJpegClientHandle) {
+ mm_dimension max_size = {0, 0};
+ cam_dimension_t size;
+
+ mParameters.getMaxPicSize(size);
+ max_size.w = size.width;
+ max_size.h = size.height;
+
+ if (getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ if (m_bRelCamCalibValid) {
+ mJpegClientHandle = jpeg_open(&mJpegHandle, &mJpegMpoHandle,
+ max_size, &mJpegMetadata);
+ } else {
+ mJpegClientHandle = jpeg_open(&mJpegHandle, &mJpegMpoHandle,
+ max_size, NULL);
+ }
+ } else {
+ mJpegClientHandle = jpeg_open(&mJpegHandle, NULL, max_size, NULL);
+ }
+ if (!mJpegClientHandle) {
+ LOGE("Error !! jpeg_open failed!! ");
+ return UNKNOWN_ERROR;
+ }
+ // Set JPEG initialized as true to signify that this camera
+ // has initialized the handle
+ mJpegHandleOwner = true;
+ }
+ LOGH("X mJpegHandleOwner: %d, mJpegClientHandle: %d camera id: %d",
+ mJpegHandleOwner, mJpegClientHandle, mCameraId);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : deinitJpegHandle
+ *
+ * DESCRIPTION: Closes JPEG client using handle
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::deinitJpegHandle() {
+ int32_t rc = NO_ERROR;
+ LOGH("E");
+ // Check if JPEG client handle is present and inited by this camera
+ if(mJpegHandleOwner && mJpegClientHandle) {
+ rc = mJpegHandle.close(mJpegClientHandle);
+ if (rc != NO_ERROR) {
+ LOGE("Error!! Closing mJpegClientHandle: %d failed",
+ mJpegClientHandle);
+ }
+ memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+ memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+ mJpegHandleOwner = false;
+ }
+ mJpegClientHandle = 0;
+ LOGH("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegHandleInfo
+ *
+ * DESCRIPTION: sets JPEG client handle info
+ *
+ * PARAMETERS:
+ * @ops : JPEG ops
+ * @mpo_ops : Jpeg MPO ops
+ * @pJpegClientHandle : i/p Jpeg Client Handle
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setJpegHandleInfo(mm_jpeg_ops_t *ops,
+ mm_jpeg_mpo_ops_t *mpo_ops, uint32_t pJpegClientHandle) {
+
+ if (pJpegClientHandle && ops && mpo_ops) {
+ LOGH("Setting JPEG client handle %d",
+ pJpegClientHandle);
+ memcpy(&mJpegHandle, ops, sizeof(mm_jpeg_ops_t));
+ memcpy(&mJpegMpoHandle, mpo_ops, sizeof(mm_jpeg_mpo_ops_t));
+ mJpegClientHandle = pJpegClientHandle;
+ return NO_ERROR;
+ }
+ else {
+ LOGE("Error!! No Handle found: %d",
+ pJpegClientHandle);
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegHandleInfo
+ *
+ * DESCRIPTION: gets JPEG client handle info
+ *
+ * PARAMETERS:
+ * @ops : JPEG ops
+ * @mpo_ops : Jpeg MPO ops
+ * @pJpegClientHandle : o/p Jpeg Client Handle
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getJpegHandleInfo(mm_jpeg_ops_t *ops,
+ mm_jpeg_mpo_ops_t *mpo_ops, uint32_t *pJpegClientHandle) {
+
+ if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+ LOGE("Init PProc Deferred work failed");
+ return UNKNOWN_ERROR;
+ }
+ // Copy JPEG ops if present
+ if (ops && mpo_ops && pJpegClientHandle) {
+ memcpy(ops, &mJpegHandle, sizeof(mm_jpeg_ops_t));
+ memcpy(mpo_ops, &mJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
+ *pJpegClientHandle = mJpegClientHandle;
+ LOGH("Getting JPEG client handle %d",
+ pJpegClientHandle);
+ return NO_ERROR;
+ } else {
+ return BAD_VALUE;
+ }
+}
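+
+/*===========================================================================
+ * Usage sketch (illustrative only; the real call sites are outside this
+ * file, presumably in the dual-camera/MPO path): the instance that owns the
+ * JPEG client exports its handle and a second instance adopts it.
+ * "ownerHwi" and "otherHwi" are hypothetical QCamera2HardwareInterface
+ * pointers:
+ *
+ *   mm_jpeg_ops_t ops;
+ *   mm_jpeg_mpo_ops_t mpoOps;
+ *   uint32_t jpegHandle = 0;
+ *   if (ownerHwi->getJpegHandleInfo(&ops, &mpoOps, &jpegHandle) == NO_ERROR) {
+ *       otherHwi->setJpegHandleInfo(&ops, &mpoOps, jpegHandle);
+ *   }
+ *==========================================================================*/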
+
+/*===========================================================================
+ * FUNCTION : dequeueDeferredWork
+ *
+ * DESCRIPTION: function which dequeues deferred tasks
+ *
+ * PARAMETERS :
+ * @dw : deferred work
+ * @jobStatus: deferred task job status
+ *
+ * RETURN : uint32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::dequeueDeferredWork(DefWork* dw, int32_t jobStatus)
+{
+ Mutex::Autolock l(mDefLock);
+ for (uint32_t i = 0; i < MAX_ONGOING_JOBS; i++) {
+ if (mDefOngoingJobs[i].mDefJobId == dw->id) {
+ if (jobStatus != NO_ERROR) {
+ mDefOngoingJobs[i].mDefJobStatus = jobStatus;
+ LOGH("updating job status %d for id %d",
+ jobStatus, dw->id);
+ } else {
+ mDefOngoingJobs[i].mDefJobId = 0;
+ mDefOngoingJobs[i].mDefJobStatus = 0;
+ }
+ delete dw;
+ mDefCond.broadcast();
+ return NO_ERROR;
+ }
+ }
+
+ return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getDefJobStatus
+ *
+ * DESCRIPTION: Gets whether a deferred task succeeded or failed
+ *
+ * PARAMETERS :
+ * @job_id : deferred task id
+ *
+ * RETURN : NO_ERROR if the job succeeded (or no such job is pending), otherwise the stored failure status
+ *
+ * PRECONDITION : mDefLock is held by current thread
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getDefJobStatus(uint32_t &job_id)
+{
+ for (uint32_t i = 0; i < MAX_ONGOING_JOBS; i++) {
+ if (mDefOngoingJobs[i].mDefJobId == job_id) {
+ if ( NO_ERROR != mDefOngoingJobs[i].mDefJobStatus ) {
+ LOGE("job_id (%d) was failed", job_id);
+ return mDefOngoingJobs[i].mDefJobStatus;
+ }
+ else
+ return NO_ERROR;
+ }
+ }
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : checkDeferredWork
+ *
+ * DESCRIPTION: checks if a deferred task is in progress
+ *
+ * PARAMETERS :
+ * @job_id : deferred task id
+ *
+ * RETURN : true if the task is still pending and has not failed, otherwise false
+ *
+ * PRECONDITION : mDefLock is held by current thread
+ *==========================================================================*/
+bool QCamera2HardwareInterface::checkDeferredWork(uint32_t &job_id)
+{
+ for (uint32_t i = 0; i < MAX_ONGOING_JOBS; i++) {
+ if (mDefOngoingJobs[i].mDefJobId == job_id) {
+ return (NO_ERROR == mDefOngoingJobs[i].mDefJobStatus);
+ }
+ }
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : waitDeferredWork
+ *
+ * DESCRIPTION: waits for a deferred task to finish
+ *
+ * PARAMETERS :
+ * @job_id : deferred task id
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::waitDeferredWork(uint32_t &job_id)
+{
+ Mutex::Autolock l(mDefLock);
+
+ if (job_id == 0) {
+ LOGD("Invalid job id %d", job_id);
+ return NO_ERROR;
+ }
+
+ while (checkDeferredWork(job_id) == true ) {
+ mDefCond.waitRelative(mDefLock, CAMERA_DEFERRED_THREAD_TIMEOUT);
+ }
+ return getDefJobStatus(job_id);
+}
+
+/*===========================================================================
+ * FUNCTION : scheduleBackgroundTask
+ *
+ * DESCRIPTION: Run a requested task in the deferred thread
+ *
+ * PARAMETERS :
+ * @bgTask : Task to perform in the background
+ *
+ * RETURN : job id of deferred job
+ * : 0 in case of error
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::scheduleBackgroundTask(BackgroundTask* bgTask)
+{
+ DeferWorkArgs args;
+ memset(&args, 0, sizeof(DeferWorkArgs));
+ args.genericArgs = bgTask;
+
+ return queueDeferredWork(CMD_DEF_GENERIC, args);
+}
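+
+/*===========================================================================
+ * Usage sketch (illustrative only; BackgroundTask is defined elsewhere, the
+ * field names below follow its use in deferredWorkRoutine() and the
+ * callback signature shown is an assumption):
+ *
+ *   static int32_t flushWorkFn(void *arg) {
+ *       // hypothetical helper doing the actual background work
+ *       return NO_ERROR;
+ *   }
+ *   ...
+ *   BackgroundTask task;
+ *   task.bgFunction = flushWorkFn;
+ *   task.bgArgs = this;
+ *   // "task" must stay alive until the deferred job completes
+ *   uint32_t taskId = scheduleBackgroundTask(&task);
+ *   if ((taskId == 0) || (waitForBackgroundTask(taskId) != NO_ERROR)) {
+ *       LOGE("Background task failed");
+ *   }
+ *==========================================================================*/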
+
+/*===========================================================================
+ * FUNCTION : waitForBackgroundTask
+ *
+ * DESCRIPTION: Wait for a background task to complete
+ *
+ * PARAMETERS :
+ * @taskId : Task id to wait for
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::waitForBackgroundTask(uint32_t& taskId)
+{
+ return waitDeferredWork(taskId);
+}
+
+/*===========================================================================
+ * FUNCTION : needDeferred
+ *
+ * DESCRIPTION: Function to decide whether deferred (background) buffer allocation is needed for a stream
+ *
+ * PARAMETERS :
+ * @stream_type : stream type
+ *
+ * RETURN : true - if background task is needed
+ * false - if background task is NOT needed
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDeferred(cam_stream_type_t stream_type)
+{
+ if ((stream_type == CAM_STREAM_TYPE_PREVIEW && mPreviewWindow == NULL)
+ || (stream_type == CAM_STREAM_TYPE_ANALYSIS)) {
+ return FALSE;
+ }
+
+ if ((stream_type == CAM_STREAM_TYPE_RAW)
+ && (mParameters.getofflineRAW())) {
+ return FALSE;
+ }
+
+ if ((stream_type == CAM_STREAM_TYPE_SNAPSHOT)
+ && (!mParameters.getRecordingHintValue())){
+ return TRUE;
+ }
+
+ if ((stream_type == CAM_STREAM_TYPE_PREVIEW)
+ || (stream_type == CAM_STREAM_TYPE_METADATA)
+ || (stream_type == CAM_STREAM_TYPE_RAW)
+ || (stream_type == CAM_STREAM_TYPE_POSTVIEW)) {
+ return TRUE;
+ }
+
+ if (stream_type == CAM_STREAM_TYPE_VIDEO) {
+ return FALSE;
+ }
+ return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION : isRegularCapture
+ *
+ * DESCRIPTION: Check configuration for regular capture
+ *
+ * PARAMETERS :
+ *
+ * RETURN : true - regular capture
+ * false - other type of capture
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isRegularCapture()
+{
+ bool ret = false;
+
+ if (numOfSnapshotsExpected() == 1 &&
+ !isLongshotEnabled() &&
+ !mParameters.isHDREnabled() &&
+ !mParameters.getRecordingHintValue() &&
+ !isZSLMode() && (!mParameters.getofflineRAW() || mParameters.getQuadraCfa())) {
+ ret = true;
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getLogLevel
+ *
+ * DESCRIPTION: Reads the log level property into a variable
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN :
+ * None
+ *==========================================================================*/
+void QCamera2HardwareInterface::getLogLevel()
+{
+ char prop[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.kpi.debug", prop, "1");
+ gKpiDebugLevel = atoi(prop);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : getSensorType
+ *
+ * DESCRIPTION: Returns the type of sensor being used whether YUV or Bayer
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN : Type of sensor - bayer or YUV
+ *
+ *==========================================================================*/
+cam_sensor_t QCamera2HardwareInterface::getSensorType()
+{
+ return gCamCapability[mCameraId]->sensor_type.sens_type;
+}
+
+/*===========================================================================
+ * FUNCTION : startRAWChannel
+ *
+ * DESCRIPTION: start RAW Channel
+ *
+ * PARAMETERS :
+ * @pMetaChannel : Source channel whose metadata stream is linked to this RAW channel.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startRAWChannel(QCameraChannel *pMetaChannel)
+{
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_RAW];
+ if ((NULL != pChannel) && (mParameters.getofflineRAW())) {
+ // Find and try to link a metadata stream from preview channel
+ QCameraStream *pMetaStream = NULL;
+
+ if (pMetaChannel != NULL) {
+ uint32_t streamNum = pMetaChannel->getNumOfStreams();
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+ pStream = pMetaChannel->getStreamByIndex(i);
+ if ((NULL != pStream) &&
+ (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
+ pMetaStream = pStream;
+ break;
+ }
+ }
+
+ if (NULL != pMetaStream) {
+ rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+ if (NO_ERROR != rc) {
+ LOGE("Metadata stream link failed %d", rc);
+ }
+ }
+ }
+ rc = pChannel->start();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stopRAWChannel
+ *
+ * DESCRIPTION: stop RAW channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopRAWChannel()
+{
+ int32_t rc = NO_ERROR;
+ rc = stopChannel(QCAMERA_CH_TYPE_RAW);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : isLowPowerMode
+ *
+ * DESCRIPTION: Returns TRUE if low power mode settings are to be applied for video recording
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN : TRUE/FALSE
+ *
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isLowPowerMode()
+{
+ cam_dimension_t dim;
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_VIDEO, dim);
+
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("camera.lowpower.record.enable", prop, "0");
+ int enable = atoi(prop);
+
+ //Enable low power mode if :
+ //1. Video resolution is 2k (2048x1080) or above and
+ //2. camera.lowpower.record.enable is set
+
+ bool isLowpower = mParameters.getRecordingHintValue() && enable
+ && ((dim.width * dim.height) >= (2048 * 1080));
+ return isLowpower;
+}
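+
+/*===========================================================================
+ * Example (illustrative only, derived from the code above): with the
+ * recording hint set and the property enabled (e.g. via
+ * "adb shell setprop camera.lowpower.record.enable 1"), a 3840x2160 video
+ * stream satisfies 3840*2160 >= 2048*1080, so isLowPowerMode() returns true;
+ * for a 1920x1080 stream it returns false even when the property is set.
+ *==========================================================================*/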
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCamera2HWI.h b/camera/QCamera2/HAL/QCamera2HWI.h
new file mode 100644
index 0000000..0c1a00a
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWI.h
@@ -0,0 +1,795 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HARDWAREINTERFACE_H__
+#define __QCAMERA2HARDWAREINTERFACE_H__
+
+// System dependencies
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraAllocator.h"
+#include "QCameraChannel.h"
+#include "QCameraCmdThread.h"
+#include "QCameraDisplay.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraPerf.h"
+#include "QCameraPostProc.h"
+#include "QCameraQueue.h"
+#include "QCameraStream.h"
+#include "QCameraStateMachine.h"
+#include "QCameraThermalAdapter.h"
+
+#ifdef TARGET_TS_MAKEUP
+#include "ts_makeup_engine.h"
+#include "ts_detectface_engine.h"
+#endif
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+#include "QCameraTrace.h"
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+ QCAMERA_CH_TYPE_ZSL,
+ QCAMERA_CH_TYPE_CAPTURE,
+ QCAMERA_CH_TYPE_PREVIEW,
+ QCAMERA_CH_TYPE_VIDEO,
+ QCAMERA_CH_TYPE_SNAPSHOT,
+ QCAMERA_CH_TYPE_RAW,
+ QCAMERA_CH_TYPE_METADATA,
+ QCAMERA_CH_TYPE_ANALYSIS,
+ QCAMERA_CH_TYPE_CALLBACK,
+ QCAMERA_CH_TYPE_MAX
+} qcamera_ch_type_enum_t;
+
+typedef struct {
+ int32_t msg_type;
+ int32_t ext1;
+ int32_t ext2;
+} qcamera_evt_argm_t;
+
+#define QCAMERA_DUMP_FRM_PREVIEW 1
+#define QCAMERA_DUMP_FRM_VIDEO (1<<1)
+#define QCAMERA_DUMP_FRM_SNAPSHOT (1<<2)
+#define QCAMERA_DUMP_FRM_THUMBNAIL (1<<3)
+#define QCAMERA_DUMP_FRM_RAW (1<<4)
+#define QCAMERA_DUMP_FRM_JPEG (1<<5)
+#define QCAMERA_DUMP_FRM_INPUT_REPROCESS (1<<6)
+
+#define QCAMERA_DUMP_FRM_MASK_ALL 0x000000ff
+
+#define QCAMERA_ION_USE_CACHE true
+#define QCAMERA_ION_USE_NOCACHE false
+#define MAX_ONGOING_JOBS 25
+
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+#define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
+
+typedef enum {
+ QCAMERA_NOTIFY_CALLBACK,
+ QCAMERA_DATA_CALLBACK,
+ QCAMERA_DATA_TIMESTAMP_CALLBACK,
+ QCAMERA_DATA_SNAPSHOT_CALLBACK
+} qcamera_callback_type_m;
+
+typedef void (*camera_release_callback)(void *user_data,
+ void *cookie,
+ int32_t cb_status);
+typedef void (*jpeg_data_callback)(int32_t msg_type,
+ const camera_memory_t *data, unsigned int index,
+ camera_frame_metadata_t *metadata, void *user,
+ uint32_t frame_idx, camera_release_callback release_cb,
+ void *release_cookie, void *release_data);
+
+typedef struct {
+ qcamera_callback_type_m cb_type; // event type
+ int32_t msg_type; // msg type
+ int32_t ext1; // extended parameter
+ int32_t ext2; // extended parameter
+ camera_memory_t * data; // ptr to data memory struct
+ unsigned int index; // index of the buf in the whole buffer
+ int64_t timestamp; // buffer timestamp
+ camera_frame_metadata_t *metadata; // meta data
+ void *user_data; // any data needs to be released after callback
+ void *cookie; // release callback cookie
+ camera_release_callback release_cb; // release callback
+ uint32_t frame_index; // frame index for the buffer
+} qcamera_callback_argm_t;
+
+class QCameraCbNotifier {
+public:
+ QCameraCbNotifier(QCamera2HardwareInterface *parent) :
+ mNotifyCb (NULL),
+ mDataCb (NULL),
+ mDataCbTimestamp (NULL),
+ mCallbackCookie (NULL),
+ mJpegCb(NULL),
+ mJpegCallbackCookie(NULL),
+ mParent (parent),
+ mDataQ(releaseNotifications, this),
+ mActive(false){}
+
+ virtual ~QCameraCbNotifier();
+
+ virtual int32_t notifyCallback(qcamera_callback_argm_t &cbArgs);
+ virtual void setCallbacks(camera_notify_callback notifyCb,
+ camera_data_callback dataCb,
+ camera_data_timestamp_callback dataCbTimestamp,
+ void *callbackCookie);
+ virtual void setJpegCallBacks(
+ jpeg_data_callback jpegCb, void *callbackCookie);
+ virtual int32_t startSnapshots();
+ virtual void stopSnapshots();
+ virtual void exit();
+ static void * cbNotifyRoutine(void * data);
+ static void releaseNotifications(void *data, void *user_data);
+ static bool matchSnapshotNotifications(void *data, void *user_data);
+ static bool matchPreviewNotifications(void *data, void *user_data);
+ static bool matchTimestampNotifications(void *data, void *user_data);
+ virtual int32_t flushPreviewNotifications();
+ virtual int32_t flushVideoNotifications();
+private:
+
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ void *mCallbackCookie;
+ jpeg_data_callback mJpegCb;
+ void *mJpegCallbackCookie;
+ QCamera2HardwareInterface *mParent;
+
+ QCameraQueue mDataQ;
+ QCameraCmdThread mProcTh;
+ bool mActive;
+};
+class QCamera2HardwareInterface : public QCameraAllocator,
+ public QCameraThermalCallback, public QCameraAdjustFPS
+{
+public:
+ /* static variable and functions accessed by camera service */
+ static camera_device_ops_t mCameraOps;
+
+ static int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ static void set_CallBacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+ static void enable_msg_type(struct camera_device *, int32_t msg_type);
+ static void disable_msg_type(struct camera_device *, int32_t msg_type);
+ static int msg_type_enabled(struct camera_device *, int32_t msg_type);
+ static int start_preview(struct camera_device *);
+ static void stop_preview(struct camera_device *);
+ static int preview_enabled(struct camera_device *);
+ static int store_meta_data_in_buffers(struct camera_device *, int enable);
+ static int restart_start_preview(struct camera_device *);
+ static int restart_stop_preview(struct camera_device *);
+ static int pre_start_recording(struct camera_device *);
+ static int start_recording(struct camera_device *);
+ static void stop_recording(struct camera_device *);
+ static int recording_enabled(struct camera_device *);
+ static void release_recording_frame(struct camera_device *, const void *opaque);
+ static int auto_focus(struct camera_device *);
+ static int cancel_auto_focus(struct camera_device *);
+ static int pre_take_picture(struct camera_device *);
+ static int take_picture(struct camera_device *);
+ int takeLiveSnapshot_internal();
+ int cancelLiveSnapshot_internal();
+ int takeBackendPic_internal(bool *JpegMemOpt, char *raw_format);
+ void clearIntPendingEvents();
+ void checkIntPicPending(bool JpegMemOpt, char *raw_format);
+ static int cancel_picture(struct camera_device *);
+ static int set_parameters(struct camera_device *, const char *parms);
+ static int stop_after_set_params(struct camera_device *);
+ static int commit_params(struct camera_device *);
+ static int restart_after_set_params(struct camera_device *);
+ static char* get_parameters(struct camera_device *);
+ static void put_parameters(struct camera_device *, char *);
+ static int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+ static int send_command_restart(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+ static void release(struct camera_device *);
+ static int dump(struct camera_device *, int fd);
+ static int close_camera_device(hw_device_t *);
+
+ static int register_face_image(struct camera_device *,
+ void *img_ptr,
+ cam_pp_offline_src_config_t *config);
+ static int prepare_preview(struct camera_device *);
+ static int prepare_snapshot(struct camera_device *device);
+
+public:
+ QCamera2HardwareInterface(uint32_t cameraId);
+ virtual ~QCamera2HardwareInterface();
+ int openCamera(struct hw_device_t **hw_device);
+
+ // Dual camera specific operations
+ int bundleRelatedCameras(bool syncOn,
+ uint32_t related_sensor_session_id);
+ int getCameraSessionId(uint32_t* session_id);
+ const cam_sync_related_sensors_event_info_t* getRelatedCamSyncInfo(
+ void);
+ int32_t setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info);
+ bool isFrameSyncEnabled(void);
+ int32_t setFrameSyncEnabled(bool enable);
+ int32_t setMpoComposition(bool enable);
+ bool getMpoComposition(void);
+ bool getRecordingHintValue(void);
+ int32_t setRecordingHintValue(int32_t value);
+ bool isPreviewRestartNeeded(void) { return mPreviewRestartNeeded; };
+ static int getCapabilities(uint32_t cameraId,
+ struct camera_info *info, cam_sync_type_t *cam_type);
+ static int initCapabilities(uint32_t cameraId, mm_camera_vtbl_t *cameraHandle);
+ cam_capability_t *getCamHalCapabilities();
+
+ // Implementation of QCameraAllocator
+ virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+ size_t size, int stride, int scanline, uint8_t &bufferCnt);
+ virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+ size_t size, uint8_t &bufferCnt);
+
+ virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type);
+ virtual QCameraHeapMemory *allocateMiscBuf(cam_stream_info_t *streamInfo);
+ virtual QCameraMemory *allocateStreamUserBuf(cam_stream_info_t *streamInfo);
+ virtual void waitForDeferredAlloc(cam_stream_type_t stream_type);
+
+ // Implementation of QCameraThermalCallback
+ virtual int thermalEvtHandle(qcamera_thermal_level_enum_t *level,
+ void *userdata, void *data);
+
+ virtual int recalcFPSRange(int &minFPS, int &maxFPS,
+ const float &minVideoFPS, const float &maxVideoFPS,
+ cam_fps_range_t &adjustedRange);
+
+ friend class QCameraStateMachine;
+ friend class QCameraPostProcessor;
+ friend class QCameraCbNotifier;
+ friend class QCameraMuxer;
+
+ void setJpegCallBacks(jpeg_data_callback jpegCb,
+ void *callbackCookie);
+ int32_t initJpegHandle();
+ int32_t deinitJpegHandle();
+ int32_t setJpegHandleInfo(mm_jpeg_ops_t *ops,
+ mm_jpeg_mpo_ops_t *mpo_ops, uint32_t pJpegClientHandle);
+ int32_t getJpegHandleInfo(mm_jpeg_ops_t *ops,
+ mm_jpeg_mpo_ops_t *mpo_ops, uint32_t *pJpegClientHandle);
+ uint32_t getCameraId() { return mCameraId; };
+ bool bLiveSnapshot;
+private:
+ int setPreviewWindow(struct preview_stream_ops *window);
+ int setCallBacks(
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+ int enableMsgType(int32_t msg_type);
+ int disableMsgType(int32_t msg_type);
+ int msgTypeEnabled(int32_t msg_type);
+ int msgTypeEnabledWithLock(int32_t msg_type);
+ int startPreview();
+ int stopPreview();
+ int storeMetaDataInBuffers(int enable);
+ int preStartRecording();
+ int startRecording();
+ int stopRecording();
+ int releaseRecordingFrame(const void *opaque);
+ int autoFocus();
+ int cancelAutoFocus();
+ int preTakePicture();
+ int takePicture();
+ int stopCaptureChannel(bool destroy);
+ int cancelPicture();
+ int takeLiveSnapshot();
+ int takePictureInternal();
+ int cancelLiveSnapshot();
+ char* getParameters() {return mParameters.getParameters(); }
+ int putParameters(char *);
+ int sendCommand(int32_t cmd, int32_t &arg1, int32_t &arg2);
+ int release();
+ int dump(int fd);
+ int registerFaceImage(void *img_ptr,
+ cam_pp_offline_src_config_t *config,
+ int32_t &faceID);
+ int32_t longShot();
+
+ uint32_t deferPPInit();
+ int openCamera();
+ int closeCamera();
+
+ int processAPI(qcamera_sm_evt_enum_t api, void *api_payload);
+ int processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+ int processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+ void lockAPI();
+ void waitAPIResult(qcamera_sm_evt_enum_t api_evt, qcamera_api_result_t *apiResult);
+ void unlockAPI();
+ void signalAPIResult(qcamera_api_result_t *result);
+ void signalEvtResult(qcamera_api_result_t *result);
+
+ int calcThermalLevel(qcamera_thermal_level_enum_t level,
+ const int minFPSi, const int maxFPSi,
+ const float &minVideoFPS, const float &maxVideoFPS,
+ cam_fps_range_t &adjustedRange,
+ enum msm_vfe_frame_skip_pattern &skipPattern);
+ int updateThermalLevel(void *level);
+
+ // update entries to set parameters and check if restart is needed
+ int updateParameters(const char *parms, bool &needRestart);
+ // send request to server to set parameters
+ int commitParameterChanges();
+
+ bool isCaptureShutterEnabled();
+ bool needDebugFps();
+ bool isRegularCapture();
+ bool isCACEnabled();
+ bool is4k2kResolution(cam_dimension_t* resolution);
+ bool isPreviewRestartEnabled();
+ bool needReprocess();
+ bool needRotationReprocess();
+ void debugShowVideoFPS();
+ void debugShowPreviewFPS();
+ void dumpJpegToFile(const void *data, size_t size, uint32_t index);
+ void dumpFrameToFile(QCameraStream *stream,
+ mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc = NULL);
+ void dumpMetadataToFile(QCameraStream *stream,
+ mm_camera_buf_def_t *frame,char *type);
+ void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+ void playShutter();
+ void getThumbnailSize(cam_dimension_t &dim);
+ uint32_t getJpegQuality();
+ QCameraExif *getExifData();
+ cam_sensor_t getSensorType();
+ bool isLowPowerMode();
+
+ int32_t processAutoFocusEvent(cam_auto_focus_data_t &focus_data);
+ int32_t processZoomEvent(cam_crop_data_t &crop_info);
+ int32_t processPrepSnapshotDoneEvent(cam_prep_snapshot_state_t prep_snapshot_state);
+ int32_t processASDUpdate(cam_asd_decision_t asd_decision);
+ int32_t processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_job);
+ int32_t processHDRData(cam_asd_hdr_scene_data_t hdr_scene);
+ int32_t processRetroAECUnlock();
+ int32_t processZSLCaptureDone();
+ int32_t processSceneData(cam_scene_mode_type scene);
+ int32_t transAwbMetaToParams(cam_awb_params_t &awb_params);
+ int32_t processFocusPositionInfo(cam_focus_pos_info_t &cur_pos_info);
+ int32_t processAEInfo(cam_3a_params_t &ae_params);
+
+ int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+ int32_t sendDataNotify(int32_t msg_type,
+ camera_memory_t *data,
+ uint8_t index,
+ camera_frame_metadata_t *metadata,
+ uint32_t frame_idx);
+
+ int32_t sendPreviewCallback(QCameraStream *stream,
+ QCameraMemory *memory, uint32_t idx);
+ int32_t selectScene(QCameraChannel *pChannel,
+ mm_camera_super_buf_t *recvd_frame);
+
+ int32_t addChannel(qcamera_ch_type_enum_t ch_type);
+ int32_t startChannel(qcamera_ch_type_enum_t ch_type);
+ int32_t stopChannel(qcamera_ch_type_enum_t ch_type);
+ int32_t delChannel(qcamera_ch_type_enum_t ch_type, bool destroy = true);
+ int32_t addPreviewChannel();
+ int32_t addSnapshotChannel();
+ int32_t addVideoChannel();
+ int32_t addZSLChannel();
+ int32_t addCaptureChannel();
+ int32_t addRawChannel();
+ int32_t addMetaDataChannel();
+ int32_t addAnalysisChannel();
+ QCameraReprocessChannel *addReprocChannel(QCameraChannel *pInputChannel,
+ int8_t cur_channel_index = 0);
+ QCameraReprocessChannel *addOfflineReprocChannel(
+ cam_pp_offline_src_config_t &img_config,
+ cam_pp_feature_config_t &pp_feature,
+ stream_cb_routine stream_cb,
+ void *userdata);
+ int32_t addCallbackChannel();
+
+ int32_t addStreamToChannel(QCameraChannel *pChannel,
+ cam_stream_type_t streamType,
+ stream_cb_routine streamCB,
+ void *userData);
+ int32_t preparePreview();
+ void unpreparePreview();
+ int32_t prepareRawStream(QCameraChannel *pChannel);
+ QCameraChannel *getChannelByHandle(uint32_t channelHandle);
+ mm_camera_buf_def_t *getSnapshotFrame(mm_camera_super_buf_t *recvd_frame);
+ int32_t processFaceDetectionResult(cam_faces_data_t *fd_data);
+ bool needPreviewFDCallback(uint8_t num_faces);
+ int32_t processHistogramStats(cam_hist_stats_t &stats_data);
+ int32_t setHistogram(bool histogram_en);
+ int32_t setFaceDetection(bool enabled);
+ int32_t prepareHardwareForSnapshot(int32_t afNeeded);
+ bool needProcessPreviewFrame(uint32_t frameID);
+ bool needSendPreviewCallback();
+ bool isNoDisplayMode() {return mParameters.isNoDisplayMode();};
+ bool isZSLMode() {return mParameters.isZSLMode();};
+ bool isRdiMode() {return mParameters.isRdiMode();};
+ uint8_t numOfSnapshotsExpected() {
+ return mParameters.getNumOfSnapshots();};
+ bool isSecureMode() {return mParameters.isSecureMode();};
+ bool isLongshotEnabled() { return mLongshotEnabled; };
+ bool isHFRMode() {return mParameters.isHfrMode();};
+ bool isLiveSnapshot() {return m_stateMachine.isRecording();};
+ void setRetroPicture(bool enable) { bRetroPicture = enable; };
+ bool isRetroPicture() {return bRetroPicture; };
+ bool isHDRMode() {return mParameters.isHDREnabled();};
+ uint8_t getBufNumRequired(cam_stream_type_t stream_type);
+ bool needFDMetadata(qcamera_ch_type_enum_t channel_type);
+ int32_t configureOnlineRotation(QCameraChannel &ch);
+ int32_t declareSnapshotStreams();
+ int32_t unconfigureAdvancedCapture();
+ int32_t configureAdvancedCapture();
+ int32_t configureAFBracketing(bool enable = true);
+ int32_t configureHDRBracketing();
+ int32_t stopAdvancedCapture(QCameraPicChannel *pChannel);
+ int32_t startAdvancedCapture(QCameraPicChannel *pChannel);
+ int32_t configureOptiZoom();
+ int32_t configureStillMore();
+ int32_t configureAEBracketing();
+ int32_t updatePostPreviewParameters();
+ inline void setOutputImageCount(uint32_t aCount) {mOutputCount = aCount;}
+ inline uint32_t getOutputImageCount() {return mOutputCount;}
+ bool processUFDumps(qcamera_jpeg_evt_payload_t *evt);
+ void captureDone();
+ int32_t updateMetadata(metadata_buffer_t *pMetaData);
+ void fillFacesData(cam_faces_data_t &faces_data, metadata_buffer_t *metadata);
+
+ int32_t getPPConfig(cam_pp_feature_config_t &pp_config,
+ int8_t curIndex = 0, bool multipass = FALSE);
+ virtual uint32_t scheduleBackgroundTask(BackgroundTask* bgTask);
+ virtual int32_t waitForBackgroundTask(uint32_t &taskId);
+ bool needDeferred(cam_stream_type_t stream_type);
+ static void camEvtHandle(uint32_t camera_handle,
+ mm_camera_event_t *evt,
+ void *user_data);
+ static void jpegEvtHandle(jpeg_job_status_t status,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_buf,
+ void *userdata);
+
+ static void *evtNotifyRoutine(void *data);
+
+ // functions for different data notify cb
+ static void zsl_channel_cb(mm_camera_super_buf_t *recvd_frame, void *userdata);
+ static void capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+ void *userdata);
+ static void postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+ void *userdata);
+ static void rdi_mode_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void nodisplay_preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void synchronous_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream, void *userdata);
+ static void postview_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void video_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void snapshot_channel_cb_routine(mm_camera_super_buf_t *frame,
+ void *userdata);
+ static void raw_channel_cb_routine(mm_camera_super_buf_t *frame,
+ void *userdata);
+ static void raw_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * stream,
+ void * userdata);
+ static void snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * stream,
+ void * userdata);
+ static void metadata_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+ static void callback_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream, void *userdata);
+ static void reprocess_stream_cb_routine(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+
+ static void releaseCameraMemory(void *data,
+ void *cookie,
+ int32_t cbStatus);
+ static void returnStreamBuffer(void *data,
+ void *cookie,
+ int32_t cbStatus);
+ static void getLogLevel();
+
+ int32_t startRAWChannel(QCameraChannel *pChannel);
+ int32_t stopRAWChannel();
+
+ inline bool getNeedRestart() {return m_bNeedRestart;}
+ inline void setNeedRestart(bool needRestart) {m_bNeedRestart = needRestart;}
+
+ /*Start display skip. Skip starts after
+ skipCnt number of frames from current frame*/
+ void setDisplaySkip(bool enabled, uint8_t skipCnt = 0);
+ /*Caller can specify a frameID range to skip.
+ If end is 0, all frames after start will be skipped*/
+ void setDisplayFrameSkip(uint32_t start = 0, uint32_t end = 0);
+ /*Verifies if frameId is valid to skip*/
+ bool isDisplayFrameToSkip(uint32_t frameId);
+
+private:
+ camera_device_t mCameraDevice;
+ uint32_t mCameraId;
+ mm_camera_vtbl_t *mCameraHandle;
+ bool mCameraOpened;
+
+ cam_jpeg_metadata_t mJpegMetadata;
+ bool m_bRelCamCalibValid;
+
+ preview_stream_ops_t *mPreviewWindow;
+ QCameraParametersIntf mParameters;
+ int32_t mMsgEnabled;
+ int mStoreMetaDataInFrame;
+
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mGetMemory;
+ jpeg_data_callback mJpegCb;
+ void *mCallbackCookie;
+ void *mJpegCallbackCookie;
+ bool m_bMpoEnabled;
+
+ QCameraStateMachine m_stateMachine; // state machine
+ bool m_smThreadActive;
+ QCameraPostProcessor m_postprocessor; // post processor
+ QCameraThermalAdapter &m_thermalAdapter;
+ QCameraCbNotifier m_cbNotifier;
+ QCameraPerfLock m_perfLock;
+ pthread_mutex_t m_lock;
+ pthread_cond_t m_cond;
+ api_result_list *m_apiResultList;
+ QCameraMemoryPool m_memoryPool;
+
+ pthread_mutex_t m_evtLock;
+ pthread_cond_t m_evtCond;
+ qcamera_api_result_t m_evtResult;
+
+
+ QCameraChannel *m_channels[QCAMERA_CH_TYPE_MAX]; // array holding channel ptr
+
+ bool m_bPreviewStarted; //flag indicates first preview frame callback is received
+ bool m_bRecordStarted; //flag indicates Recording is started for first time
+
+ // Signifies if ZSL Retro Snapshots are enabled
+ bool bRetroPicture;
+ // Signifies AEC locked during zsl snapshots
+ bool m_bLedAfAecLock;
+ cam_af_state_t m_currentFocusState;
+
+ uint32_t mDumpFrmCnt; // frame dump count
+ uint32_t mDumpSkipCnt; // frame skip count
+ mm_jpeg_exif_params_t mExifParams;
+ qcamera_thermal_level_enum_t mThermalLevel;
+ bool mActiveAF;
+ bool m_HDRSceneEnabled;
+ bool mLongshotEnabled;
+
+ pthread_t mLiveSnapshotThread;
+ pthread_t mIntPicThread;
+ bool mFlashNeeded;
+ uint32_t mDeviceRotation;
+ uint32_t mCaptureRotation;
+ uint32_t mJpegExifRotation;
+ bool mUseJpegExifRotation;
+ bool mIs3ALocked;
+ bool mPrepSnapRun;
+ int32_t mZoomLevel;
+ // Flag to indicate whether preview restart needed (for dual camera mode)
+ bool mPreviewRestartNeeded;
+
+ int mVFrameCount;
+ int mVLastFrameCount;
+ nsecs_t mVLastFpsTime;
+ double mVFps;
+ int mPFrameCount;
+ int mPLastFrameCount;
+ nsecs_t mPLastFpsTime;
+ double mPFps;
+ uint8_t mInstantAecFrameCount;
+
+ //eztune variables for communication with eztune server at backend
+ bool m_bIntJpegEvtPending;
+ bool m_bIntRawEvtPending;
+ char m_BackendFileName[QCAMERA_MAX_FILEPATH_LENGTH];
+ size_t mBackendFileSize;
+ pthread_mutex_t m_int_lock;
+ pthread_cond_t m_int_cond;
+
+ enum DeferredWorkCmd {
+ CMD_DEF_ALLOCATE_BUFF,
+ CMD_DEF_PPROC_START,
+ CMD_DEF_PPROC_INIT,
+ CMD_DEF_METADATA_ALLOC,
+ CMD_DEF_CREATE_JPEG_SESSION,
+ CMD_DEF_PARAM_ALLOC,
+ CMD_DEF_PARAM_INIT,
+ CMD_DEF_GENERIC,
+ CMD_DEF_MAX
+ };
+
+ typedef struct {
+ QCameraChannel *ch;
+ cam_stream_type_t type;
+ } DeferAllocBuffArgs;
+
+ typedef struct {
+ uint8_t bufferCnt;
+ size_t size;
+ } DeferMetadataAllocArgs;
+
+ typedef struct {
+ jpeg_encode_callback_t jpeg_cb;
+ void *user_data;
+ } DeferPProcInitArgs;
+
+ typedef union {
+ DeferAllocBuffArgs allocArgs;
+ QCameraChannel *pprocArgs;
+ DeferMetadataAllocArgs metadataAllocArgs;
+ DeferPProcInitArgs pprocInitArgs;
+ BackgroundTask *genericArgs;
+ } DeferWorkArgs;
+
+ typedef struct {
+ uint32_t mDefJobId;
+
+ //Job status is needed to check whether the job was successful or failed
+ //Holds the error code when the job was not successful;
+ //0 when initialized.
+ //For a successful job there is no need to maintain the job status
+ int32_t mDefJobStatus;
+ } DefOngoingJob;
+
+ DefOngoingJob mDefOngoingJobs[MAX_ONGOING_JOBS];
+
+ struct DefWork
+ {
+ DefWork(DeferredWorkCmd cmd_,
+ uint32_t id_,
+ DeferWorkArgs args_)
+ : cmd(cmd_),
+ id(id_),
+ args(args_){};
+
+ DeferredWorkCmd cmd;
+ uint32_t id;
+ DeferWorkArgs args;
+ };
+
+ QCameraCmdThread mDeferredWorkThread;
+ QCameraQueue mCmdQueue;
+
+ Mutex mDefLock;
+ Condition mDefCond;
+
+ uint32_t queueDeferredWork(DeferredWorkCmd cmd,
+ DeferWorkArgs args);
+ uint32_t dequeueDeferredWork(DefWork* dw, int32_t jobStatus);
+ int32_t waitDeferredWork(uint32_t &job_id);
+ static void *deferredWorkRoutine(void *obj);
+ bool checkDeferredWork(uint32_t &job_id);
+ int32_t getDefJobStatus(uint32_t &job_id);
+
+ uint32_t mReprocJob;
+ uint32_t mJpegJob;
+ uint32_t mMetadataAllocJob;
+ uint32_t mInitPProcJob;
+ uint32_t mParamAllocJob;
+ uint32_t mParamInitJob;
+ uint32_t mOutputCount;
+ uint32_t mInputCount;
+ bool mAdvancedCaptureConfigured;
+ bool mHDRBracketingEnabled;
+ int32_t mNumPreviewFaces;
+ // Jpeg Handle shared between HWI instances
+ mm_jpeg_ops_t mJpegHandle;
+ // MPO handle shared between HWI instances
+ // this is needed for MPO composition of related
+ // cam images
+ mm_jpeg_mpo_ops_t mJpegMpoHandle;
+ uint32_t mJpegClientHandle;
+ bool mJpegHandleOwner;
+ //ts add for makeup
+#ifdef TARGET_TS_MAKEUP
+ TSRect mFaceRect;
+ bool TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,QCameraStream * pStream);
+ bool TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,QCameraStream * pStream);
+ bool TsMakeupProcess(mm_camera_buf_def_t *frame,QCameraStream * stream,TSRect& faceRect);
+#endif
+ QCameraMemory *mMetadataMem;
+ QCameraVideoMemory *mVideoMem;
+
+ static uint32_t sNextJobId;
+
+ //Gralloc memory details
+ pthread_mutex_t mGrallocLock;
+ uint8_t mEnqueuedBuffers;
+ bool mCACDoneReceived;
+
+ //GPU library to read buffer padding details.
+ void *lib_surface_utils;
+ int (*LINK_get_surface_pixel_alignment)();
+ uint32_t mSurfaceStridePadding;
+
+ //QCamera Display Object
+ QCameraDisplay mCameraDisplay;
+
+ bool m_bNeedRestart;
+ Mutex mMapLock;
+ Condition mMapCond;
+
+ //Used to decide the next frameID to be skipped
+ uint32_t mLastPreviewFrameID;
+ //FrameID to start frame skip.
+ uint32_t mFrameSkipStart;
+ /*FrameID to stop frameskip. If this is not set,
+ all frames are skipped till we set this*/
+ uint32_t mFrameSkipEnd;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HARDWAREINTERFACE_H__ */
diff --git a/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp b/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
new file mode 100644
index 0000000..ec05ec4
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -0,0 +1,3512 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
+#include STAT_H
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION : zsl_channel_cb
+ *
+ * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
+ * mm-camera-interface
+ *
+ * PARAMETERS :
+ * @recvd_frame : received super buffer
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : recvd_frame will be released by the caller after this call, so if
+ *              an async operation needs recvd_frame, it's our responsibility
+ *              to save a copy of it for later use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
+ void *userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf]: E");
+ char value[PROPERTY_VALUE_MAX];
+ bool dump_raw = false;
+ bool log_matching = false;
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+ LOGE("camera obj not valid");
+ return;
+ }
+
+ QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
+ if (pChannel == NULL ||
+ pChannel->getMyHandle() != recvd_frame->ch_id) {
+ LOGE("ZSL channel doesn't exist, return here");
+ return;
+ }
+
+ if(pme->mParameters.isSceneSelectionEnabled() &&
+ !pme->m_stateMachine.isCaptureRunning()) {
+ pme->selectScene(pChannel, recvd_frame);
+ pChannel->bufDone(recvd_frame);
+ return;
+ }
+
+ LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
+ recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
+ if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(
+ sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGE("processEvt for retro AEC unlock failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for retro AEC event");
+ }
+ }
+
+ // Check if retro-active frames are completed and camera is
+ // ready to go ahead with LED estimation for regular frames
+ if (recvd_frame->bReadyForPrepareSnapshot) {
+ // Send an event
+ LOGD("Ready for Prepare Snapshot, signal ");
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(
+ sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+                LOGW("processEvt Ready for Snapshot failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+            LOGE("No memory for prepare snapshot signal event"
+                    " qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ /* indicate the parent that capture is done */
+ pme->captureDone();
+
+ // save a copy for the superbuf
+ mm_camera_super_buf_t* frame =
+ (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ pChannel->bufDone(recvd_frame);
+ return;
+ }
+ *frame = *recvd_frame;
+
+ if (recvd_frame->num_bufs > 0) {
+ LOGI("[KPI Perf]: superbuf frame_idx %d",
+ recvd_frame->bufs[0]->frame_idx);
+ }
+
+ // DUMP RAW if available
+ property_get("persist.camera.zsl_raw", value, "0");
+ dump_raw = atoi(value) > 0 ? true : false;
+ if (dump_raw) {
+ for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+ if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+ mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
+ QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
+ if (NULL != pStream) {
+ pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+ }
+ break;
+ }
+ }
+ }
+
+ for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+ if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+ mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+ QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+ if (NULL != pStream) {
+ pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+ }
+ break;
+ }
+ }
+    // check whether FD metadata is needed along with the snapshot frame in ZSL mode
+ if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
+ //Need Face Detection result for snapshot frames
+ //Get the Meta Data frames
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = frame->bufs[i]; //find the metadata
+ break;
+ }
+ }
+ }
+
+ if(pMetaFrame != NULL){
+ metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
+ //send the face detection info
+ cam_faces_data_t faces_data;
+ pme->fillFacesData(faces_data, pMetaData);
+ //HARD CODE here before MCT can support
+ faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
+
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+ payload->faces_data = faces_data;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt face_detection_result failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
+ }
+ }
+ }
+
+ property_get("persist.camera.dumpmetadata", value, "0");
+ int32_t enabled = atoi(value);
+ if (enabled) {
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = frame->bufs[i];
+ if (pMetaFrame != NULL &&
+ ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+ pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ property_get("persist.camera.zsl_matching", value, "0");
+ log_matching = atoi(value) > 0 ? true : false;
+ if (log_matching) {
+ LOGH("ZSL super buffer contains:");
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL ) {
+ LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
+ frame->bufs[i]->buf_idx,
+ frame->bufs[i]->frame_idx,
+ pStream->getMyType(),
+ frame->bufs[i]->ts.tv_sec,
+ frame->bufs[i]->ts.tv_nsec);
+ }
+ }
+ }
+
+ // Wait on Postproc initialization if needed
+ // then send to postprocessor
+ if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+ (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+ LOGE("Failed to trigger process data");
+ pChannel->bufDone(recvd_frame);
+ free(frame);
+ frame = NULL;
+ return;
+ }
+
+ LOGH("[KPI Perf]: X");
+}
+
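+/* The retro-AEC unlock and prepare-snapshot branches above share one internal
+ * event pattern: allocate a payload, zero it, set evt_type, hand it to the
+ * state machine, and free it only if processEvt() fails (on success the state
+ * machine consumes and frees it). The sketch below is illustrative only and is
+ * kept out of the build; the helper name sendInternalEvtSketch is assumed, and
+ * it presumes access to processEvt(), which the real callbacks have because
+ * they are static members of QCamera2HardwareInterface. */
+#if 0
+static int32_t sendInternalEvtSketch(QCamera2HardwareInterface *pme)
+{
+    qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(
+            sizeof(qcamera_sm_internal_evt_payload_t));
+    if (payload == NULL) {
+        return NO_MEMORY;
+    }
+    memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+    payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
+    int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+    if (rc != NO_ERROR) {
+        free(payload);  // not consumed by the state machine on failure
+    }
+    return rc;
+}
+#endif
+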
+/*===========================================================================
+ * FUNCTION : selectScene
+ *
+ * DESCRIPTION: send a preview callback when a specific selected scene is applied
+ *
+ * PARAMETERS :
+ * @pChannel: Camera channel
+ * @frame : Bundled super buffer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
+ mm_camera_super_buf_t *frame)
+{
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ QCameraStream *pStream = NULL;
+ int32_t rc = NO_ERROR;
+
+ if ((NULL == frame) || (NULL == pChannel)) {
+ LOGE("Invalid scene select input");
+ return BAD_VALUE;
+ }
+
+ cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
+ if (CAM_SCENE_MODE_MAX == selectedScene) {
+ LOGL("No selected scene");
+ return NO_ERROR;
+ }
+
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = frame->bufs[i];
+ break;
+ }
+ }
+ }
+
+ if (NULL == pMetaFrame) {
+ LOGE("No metadata buffer found in scene select super buffer");
+ return NO_INIT;
+ }
+
+ metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
+
+ IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
+ if ((*scene == selectedScene) &&
+ (mDataCb != NULL) &&
+ (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
+ mm_camera_buf_def_t *preview_frame = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+ preview_frame = frame->bufs[i];
+ break;
+ }
+ }
+ }
+ if (preview_frame) {
+ QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
+ uint32_t idx = preview_frame->buf_idx;
+ rc = sendPreviewCallback(pStream, memory, idx);
+ if (NO_ERROR != rc) {
+ LOGE("Error triggering scene select preview callback");
+ } else {
+ mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
+ }
+ } else {
+ LOGE("No preview buffer found in scene select super buffer");
+ return NO_INIT;
+ }
+ }
+ } else {
+ LOGE("No current scene metadata!");
+ rc = NO_INIT;
+ }
+
+ return rc;
+}
+
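+/* The loops above and in the channel callbacks repeatedly resolve each buffer
+ * of a super buffer to its stream and pick the first one of a given type
+ * (metadata, preview, snapshot). A minimal sketch of that lookup is shown
+ * below, illustrative only and kept out of the build; the helper name
+ * findBufOfTypeSketch is assumed. */
+#if 0
+static mm_camera_buf_def_t *findBufOfTypeSketch(QCameraChannel *pChannel,
+        mm_camera_super_buf_t *frame, cam_stream_type_t type)
+{
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream =
+                pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if ((pStream != NULL) && pStream->isTypeOf(type)) {
+            return frame->bufs[i];  // e.g. the metadata or preview buffer
+        }
+    }
+    return NULL;  // no buffer of the requested type in this super buffer
+}
+#endif
+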
+/*===========================================================================
+ * FUNCTION : capture_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
+ * mm-camera-interface
+ *
+ * PARAMETERS :
+ * @recvd_frame : received super buffer
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : recvd_frame will be released by the caller after this call, so if
+ *              an async operation needs recvd_frame, it's our responsibility
+ *              to save a copy of it for later use.
+*==========================================================================*/
+void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+ void *userdata)
+{
+ KPI_ATRACE_CALL();
+ char value[PROPERTY_VALUE_MAX];
+ LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+ LOGE("camera obj not valid");
+ return;
+ }
+
+ QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
+ if (pChannel == NULL ||
+ pChannel->getMyHandle() != recvd_frame->ch_id) {
+ LOGE("Capture channel doesn't exist, return here");
+ return;
+ }
+
+ // save a copy for the superbuf
+ mm_camera_super_buf_t* frame =
+ (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ pChannel->bufDone(recvd_frame);
+ return;
+ }
+ *frame = *recvd_frame;
+
+ if (recvd_frame->num_bufs > 0) {
+ LOGI("[KPI Perf]: superbuf frame_idx %d",
+ recvd_frame->bufs[0]->frame_idx);
+ }
+
+ for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
+ if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
+ mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+ QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+ if ( NULL != pStream ) {
+ pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+ }
+ break;
+ }
+ }
+
+ property_get("persist.camera.dumpmetadata", value, "0");
+ int32_t enabled = atoi(value);
+ if (enabled) {
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = frame->bufs[i]; //find the metadata
+ if (pMetaFrame != NULL &&
+ ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+ pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ // Wait on Postproc initialization if needed
+ // then send to postprocessor
+ if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+ (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+ LOGE("Failed to trigger process data");
+ pChannel->bufDone(recvd_frame);
+ free(frame);
+ frame = NULL;
+ return;
+ }
+
+/* START of test register face image for face authentication */
+#ifdef QCOM_TEST_FACE_REGISTER_FACE
+ static uint8_t bRunFaceReg = 1;
+
+ if (bRunFaceReg > 0) {
+ // find snapshot frame
+ QCameraStream *main_stream = NULL;
+ mm_camera_buf_def_t *main_frame = NULL;
+ for (int i = 0; i < recvd_frame->num_bufs; i++) {
+ QCameraStream *pStream =
+ pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ main_stream = pStream;
+ main_frame = recvd_frame->bufs[i];
+ break;
+ }
+ }
+ }
+ if (main_stream != NULL && main_frame != NULL) {
+ int32_t faceId = -1;
+ cam_pp_offline_src_config_t config;
+ memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
+ config.num_of_bufs = 1;
+ main_stream->getFormat(config.input_fmt);
+ main_stream->getFrameDimension(config.input_dim);
+ main_stream->getFrameOffset(config.input_buf_planes.plane_info);
+ LOGH("DEBUG: registerFaceImage E");
+ int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
+ LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
+ bRunFaceReg = 0;
+ }
+ }
+
+#endif
+/* END of test register face image for face authentication */
+
+ LOGH("[KPI Perf]: X");
+}
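+
+/* The raw/metadata dumps above are gated by persist.camera.* properties read
+ * with property_get() and parsed with atoi(). A minimal sketch of that check
+ * follows, illustrative only and kept out of the build; it mirrors the
+ * persist.camera.dumpmetadata check used in the routines above. */
+#if 0
+static bool isDebugPropEnabledSketch()
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpmetadata", value, "0");
+    return atoi(value) > 0;  // any positive value enables the debug path
+}
+#endif
+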
+#ifdef TARGET_TS_MAKEUP
+bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
+ QCameraStream * pStream) {
+ LOGD("begin");
+ bool bRet = false;
+ if (pStream == NULL || pFrame == NULL) {
+ bRet = false;
+ LOGH("pStream == NULL || pFrame == NULL");
+ } else {
+ bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
+ }
+ LOGD("end bRet = %d ",bRet);
+ return bRet;
+}
+
+bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
+ QCameraStream * pStream) {
+ LOGD("begin");
+ bool bRet = false;
+ if (pStream == NULL || pFrame == NULL) {
+ bRet = false;
+ LOGH("pStream == NULL || pFrame == NULL");
+ } else {
+ cam_frame_len_offset_t offset;
+ memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+ pStream->getFrameOffset(offset);
+
+ cam_dimension_t dim;
+ pStream->getFrameDimension(dim);
+
+ unsigned char *yBuf = (unsigned char*)pFrame->buffer;
+ unsigned char *uvBuf = yBuf + offset.mp[0].len;
+ TSMakeupDataEx inMakeupData;
+ inMakeupData.frameWidth = dim.width;
+ inMakeupData.frameHeight = dim.height;
+ inMakeupData.yBuf = yBuf;
+ inMakeupData.uvBuf = uvBuf;
+ inMakeupData.yStride = offset.mp[0].stride;
+ inMakeupData.uvStride = offset.mp[1].stride;
+ LOGD("detect begin");
+ TSHandle fd_handle = ts_detectface_create_context();
+ if (fd_handle != NULL) {
+ cam_format_t fmt;
+ pStream->getFormat(fmt);
+ int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
+ LOGD("ts_detectface_detect iret = %d",iret);
+ if (iret <= 0) {
+ bRet = false;
+ } else {
+ TSRect faceRect;
+ memset(&faceRect,-1,sizeof(TSRect));
+ iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
+ LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
+ "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
+ ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
+ bRet = TsMakeupProcess(pFrame,pStream,faceRect);
+ }
+ ts_detectface_destroy_context(&fd_handle);
+ fd_handle = NULL;
+ } else {
+ LOGH("fd_handle == NULL");
+ }
+ LOGD("detect end");
+ }
+ LOGD("end bRet = %d ",bRet);
+ return bRet;
+}
+
+bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
+ QCameraStream * pStream,TSRect& faceRect) {
+ bool bRet = false;
+ LOGD("begin");
+ if (pStream == NULL || pFrame == NULL) {
+ LOGH("pStream == NULL || pFrame == NULL ");
+ return false;
+ }
+
+ int whiteLevel, cleanLevel;
+ bool enableMakeup = (faceRect.left > -1) &&
+ (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
+ if (enableMakeup) {
+ cam_dimension_t dim;
+ cam_frame_len_offset_t offset;
+ pStream->getFrameDimension(dim);
+ pStream->getFrameOffset(offset);
+ unsigned char *tempOriBuf = NULL;
+
+ tempOriBuf = (unsigned char*)pFrame->buffer;
+ unsigned char *yBuf = tempOriBuf;
+ unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
+ unsigned char *tmpBuf = new unsigned char[offset.frame_len];
+ if (tmpBuf == NULL) {
+ LOGH("tmpBuf == NULL ");
+ return false;
+ }
+ TSMakeupDataEx inMakeupData, outMakeupData;
+ whiteLevel = whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
+ cleanLevel = cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
+ inMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
+ inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
+ inMakeupData.yBuf = yBuf; // Y buffer pointer
+ inMakeupData.uvBuf = uvBuf; // VU buffer pointer
+ inMakeupData.yStride = offset.mp[0].stride;
+ inMakeupData.uvStride = offset.mp[1].stride;
+ outMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
+ outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
+ outMakeupData.yBuf = tmpBuf; // Y buffer pointer
+ outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
+ outMakeupData.yStride = offset.mp[0].stride;
+ outMakeupData.uvStride = offset.mp[1].stride;
+ LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
+ faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
+ ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
+ memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
+ QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
+ memory->cleanCache(pFrame->buf_idx);
+ if (tmpBuf != NULL) {
+ delete[] tmpBuf;
+ tmpBuf = NULL;
+ }
+ }
+ LOGD("end bRet = %d ",bRet);
+ return bRet;
+}
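+
+/* TsMakeupProcess() above clamps the UI levels to [0, 100] and points the
+ * makeup engine at the NV21 Y and interleaved VU planes using the stream's
+ * frame offset info. The sketch below isolates just that setup; it is
+ * illustrative only, kept out of the build, and the helper name
+ * fillMakeupInputSketch is assumed. */
+#if 0
+static void fillMakeupInputSketch(TSMakeupDataEx &data, unsigned char *buf,
+        const cam_dimension_t &dim, const cam_frame_len_offset_t &offset,
+        int &whiteLevel, int &cleanLevel)
+{
+    // clamp both levels into the supported 0..100 range
+    whiteLevel = whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
+    cleanLevel = cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
+    data.frameWidth  = dim.width;
+    data.frameHeight = dim.height;
+    data.yBuf  = buf;                     // Y plane starts at the buffer base
+    data.uvBuf = buf + offset.mp[0].len;  // interleaved VU plane follows Y
+    data.yStride  = offset.mp[0].stride;
+    data.uvStride = offset.mp[1].stride;
+}
+#endif
+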
+#endif
+/*===========================================================================
+ * FUNCTION : postproc_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
+ * mm-camera-interface
+ *
+ * PARAMETERS :
+ * @recvd_frame : received super buffer
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : recvd_frame will be released by the caller after this call, so if
+ *              an async operation needs recvd_frame, it's our responsibility
+ *              to save a copy of it for later use.
+*==========================================================================*/
+void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+ void *userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf]: E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+ LOGE("camera obj not valid");
+ return;
+ }
+
+ // save a copy for the superbuf
+ mm_camera_super_buf_t* frame =
+ (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ return;
+ }
+ *frame = *recvd_frame;
+
+ if (recvd_frame->num_bufs > 0) {
+ LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
+ }
+ // Wait on JPEG create session
+ pme->waitDeferredWork(pme->mJpegJob);
+
+ // send to postprocessor
+ pme->m_postprocessor.processPPData(frame);
+
+ ATRACE_INT("Camera:Reprocess", 0);
+ LOGH("[KPI Perf]: X");
+}
+
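+/* The channel callbacks above must not touch their super buffer after
+ * returning, because the interface reclaims it; each of them therefore takes
+ * a shallow copy before handing work to the postprocessor, and returns the
+ * buffers with bufDone() when the copy cannot be made. A sketch of that step
+ * is shown below, illustrative only and kept out of the build; the helper
+ * name cloneSuperBufSketch is assumed. */
+#if 0
+static mm_camera_super_buf_t *cloneSuperBufSketch(QCameraChannel *pChannel,
+        mm_camera_super_buf_t *recvd_frame)
+{
+    mm_camera_super_buf_t *frame =
+            (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        // cannot keep the frame; give the buffers back to the backend
+        pChannel->bufDone(recvd_frame);
+        return NULL;
+    }
+    *frame = *recvd_frame;  // shallow copy; the image memory is not duplicated
+    return frame;
+}
+#endif
+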
+/*===========================================================================
+ * FUNCTION : synchronous_stream_cb_routine
+ *
+ * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : This function is executed in mm-interface context.
+ * Avoid adding latency on this thread.
+ *==========================================================================*/
+void QCamera2HardwareInterface::synchronous_stream_cb_routine(
+ mm_camera_super_buf_t *super_frame, QCameraStream * stream,
+ void *userdata)
+{
+ nsecs_t frameTime = 0, mPreviewTimestamp = 0;
+ int err = NO_ERROR;
+
+ ATRACE_CALL();
+ LOGH("[KPI Perf] : BEGIN");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ QCameraGrallocMemory *memory = NULL;
+
+ if (pme == NULL) {
+ LOGE("Invalid hardware object");
+ return;
+ }
+ if (super_frame == NULL) {
+ LOGE("Invalid super buffer");
+ return;
+ }
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+ LOGE("Frame is NULL");
+ return;
+ }
+
+ if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
+ LOGE("This is only for PREVIEW stream for now");
+ return;
+ }
+
+ if(pme->m_bPreviewStarted) {
+ LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
+ pme->m_bPreviewStarted = false;
+ }
+
+ if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+ pthread_mutex_lock(&pme->mGrallocLock);
+ pme->mLastPreviewFrameID = frame->frame_idx;
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ LOGH("preview is not running, no need to process");
+ return;
+ }
+
+ frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
+ // Calculate the future presentation time stamp for displaying frames at regular interval
+ mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
+ stream->mStreamTimestamp = frameTime;
+ memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+#ifdef TARGET_TS_MAKEUP
+ pme->TsMakeupProcess_Preview(frame,stream);
+#endif
+
+ // Enqueue buffer to gralloc.
+ uint32_t idx = frame->buf_idx;
+ LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
+ pme, idx, frameTime, mPreviewTimestamp);
+ err = memory->enqueueBuffer(idx, mPreviewTimestamp);
+
+ if (err == NO_ERROR) {
+ pthread_mutex_lock(&pme->mGrallocLock);
+ pme->mLastPreviewFrameID = frame->frame_idx;
+ pme->mEnqueuedBuffers++;
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ } else {
+ LOGE("Enqueue Buffer failed");
+ }
+
+ LOGH("[KPI Perf] : END");
+ return;
+}
+
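+/* synchronous_stream_cb_routine() above folds the buffer's seconds/nanoseconds
+ * timestamp into a single nsecs_t before asking QCameraDisplay for a
+ * presentation time. A sketch of that conversion, illustrative only and kept
+ * out of the build: */
+#if 0
+static nsecs_t bufTimestampNsSketch(const mm_camera_buf_def_t *frame)
+{
+    // 1 s == 1e9 ns; keep the arithmetic in 64 bits to avoid overflow
+    return nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
+}
+#endif
+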
+/*===========================================================================
+ * FUNCTION : preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ * normal case with display.
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done. The new
+ * preview frame will be sent to display, and an older frame
+ * will be dequeued from display and needs to be returned back
+ * to kernel for future use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+ QCameraStream * stream,
+ void *userdata)
+{
+ KPI_ATRACE_CALL();
+ LOGH("[KPI Perf] : BEGIN");
+ int err = NO_ERROR;
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+ uint8_t dequeueCnt = 0;
+
+ if (pme == NULL) {
+ LOGE("Invalid hardware object");
+ free(super_frame);
+ return;
+ }
+ if (memory == NULL) {
+ LOGE("Invalid memory object");
+ free(super_frame);
+ return;
+ }
+
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+        LOGE("preview frame is NULL");
+ free(super_frame);
+ return;
+ }
+
+ // For instant capture and for instant AEC, keep track of the frame counter.
+ // This count will be used to check against the corresponding bound values.
+ if (pme->mParameters.isInstantAECEnabled() ||
+ pme->mParameters.isInstantCaptureEnabled()) {
+ pme->mInstantAecFrameCount++;
+ }
+
+ pthread_mutex_lock(&pme->mGrallocLock);
+ if (!stream->isSyncCBEnabled()) {
+ pme->mLastPreviewFrameID = frame->frame_idx;
+ }
+ if (((!stream->isSyncCBEnabled()) &&
+ (!pme->needProcessPreviewFrame(frame->frame_idx))) ||
+ ((stream->isSyncCBEnabled()) &&
+ (memory->isBufOwnedByCamera(frame->buf_idx)))) {
+ //If buffer owned by camera, then it is not enqueued to display.
+ // bufDone it back to backend.
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ LOGH("preview is not running, no need to process");
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+ return;
+ } else {
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ }
+
+ if (pme->needDebugFps()) {
+ pme->debugShowPreviewFPS();
+ }
+
+ uint32_t idx = frame->buf_idx;
+
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
+ if(pme->m_bPreviewStarted) {
+ LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
+ pme->m_bPreviewStarted = false ;
+ }
+
+ if (!stream->isSyncCBEnabled()) {
+ LOGD("Enqueue Buffer to display %d", idx);
+#ifdef TARGET_TS_MAKEUP
+ pme->TsMakeupProcess_Preview(frame,stream);
+#endif
+ err = memory->enqueueBuffer(idx);
+
+ if (err == NO_ERROR) {
+ pthread_mutex_lock(&pme->mGrallocLock);
+ pme->mEnqueuedBuffers++;
+ dequeueCnt = pme->mEnqueuedBuffers;
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ } else {
+ LOGE("Enqueue Buffer failed");
+ }
+ } else {
+ pthread_mutex_lock(&pme->mGrallocLock);
+ dequeueCnt = pme->mEnqueuedBuffers;
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ }
+
+ // Display the buffer.
+ LOGD("%p displayBuffer %d E", pme, idx);
+ uint8_t numMapped = memory->getMappable();
+
+ for (uint8_t i = 0; i < dequeueCnt; i++) {
+ int dequeuedIdx = memory->dequeueBuffer();
+ if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
+ LOGE("Invalid dequeued buffer index %d from display",
+ dequeuedIdx);
+ break;
+ } else {
+ pthread_mutex_lock(&pme->mGrallocLock);
+ pme->mEnqueuedBuffers--;
+ pthread_mutex_unlock(&pme->mGrallocLock);
+ if (dequeuedIdx >= numMapped) {
+ // This buffer has not yet been mapped to the backend
+ err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
+ if (memory->checkIfAllBuffersMapped()) {
+ // check if mapping is done for all the buffers
+ // Signal the condition for create jpeg session
+ Mutex::Autolock l(pme->mMapLock);
+ pme->mMapCond.signal();
+ LOGH("Mapping done for all bufs");
+ } else {
+ LOGH("All buffers are not yet mapped");
+ }
+ }
+ }
+
+ if (err < 0) {
+ LOGE("buffer mapping failed %d", err);
+ } else {
+ // Return dequeued buffer back to driver
+ err = stream->bufDone((uint32_t)dequeuedIdx);
+ if ( err < 0) {
+ LOGW("stream bufDone failed %d", err);
+ }
+ }
+ }
+
+ // Handle preview data callback
+ if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
+ if (pme->needSendPreviewCallback() &&
+ (!pme->mParameters.isSceneSelectionEnabled())) {
+ int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
+ if (NO_ERROR != rc) {
+                LOGW("Preview callback was not sent successfully");
+ }
+ }
+ }
+
+ free(super_frame);
+ LOGH("[KPI Perf] : END");
+ return;
+}
+
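+/* preview_stream_cb_routine() above tracks how many preview buffers the
+ * display currently owns; every update of mEnqueuedBuffers and
+ * mLastPreviewFrameID is wrapped in mGrallocLock so the display and
+ * sync-callback paths see a consistent count. The sketch below is illustrative
+ * only and kept out of the build; it assumes access to those members, which
+ * the real callbacks have as static members of the class. */
+#if 0
+static void noteEnqueuedBufferSketch(QCamera2HardwareInterface *pme,
+        uint32_t frame_idx)
+{
+    pthread_mutex_lock(&pme->mGrallocLock);
+    pme->mLastPreviewFrameID = frame_idx;  // last frame handed to the display
+    pme->mEnqueuedBuffers++;               // one more buffer owned by gralloc
+    pthread_mutex_unlock(&pme->mGrallocLock);
+}
+#endif
+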
+/*===========================================================================
+ * FUNCTION : sendPreviewCallback
+ *
+ * DESCRIPTION: helper function for triggering preview callbacks
+ *
+ * PARAMETERS :
+ * @stream : stream object
+ * @memory : Stream memory allocator
+ * @idx : buffer index
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
+ QCameraMemory *memory, uint32_t idx)
+{
+ camera_memory_t *previewMem = NULL;
+ camera_memory_t *data = NULL;
+ camera_memory_t *dataToApp = NULL;
+ size_t previewBufSize = 0;
+ size_t previewBufSizeFromCallback = 0;
+ cam_dimension_t preview_dim;
+ cam_format_t previewFmt;
+ int32_t rc = NO_ERROR;
+ int32_t yStride = 0;
+ int32_t yScanline = 0;
+ int32_t uvStride = 0;
+ int32_t uvScanline = 0;
+ int32_t uStride = 0;
+ int32_t uScanline = 0;
+ int32_t vStride = 0;
+ int32_t vScanline = 0;
+ int32_t yStrideToApp = 0;
+ int32_t uvStrideToApp = 0;
+ int32_t yScanlineToApp = 0;
+ int32_t uvScanlineToApp = 0;
+ int32_t srcOffset = 0;
+ int32_t dstOffset = 0;
+ int32_t srcBaseOffset = 0;
+ int32_t dstBaseOffset = 0;
+ int i;
+
+ if ((NULL == stream) || (NULL == memory)) {
+ LOGE("Invalid preview callback input");
+ return BAD_VALUE;
+ }
+
+ cam_stream_info_t *streamInfo =
+ reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
+ if (NULL == streamInfo) {
+ LOGE("Invalid streamInfo");
+ return BAD_VALUE;
+ }
+
+ stream->getFrameDimension(preview_dim);
+ stream->getFormat(previewFmt);
+
+ yStrideToApp = preview_dim.width;
+ yScanlineToApp = preview_dim.height;
+ uvStrideToApp = yStrideToApp;
+ uvScanlineToApp = yScanlineToApp / 2;
+
+    /* The preview buffer size in the callback should be
+     * (width*height*bytes_per_pixel). As all preview formats we support
+     * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
+     * A check needs to be added if other formats are supported in the future. */
+ if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
+ (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
+ (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
+ (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
+ (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
+ (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
+ if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
+ yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+ yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+ uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+ uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+ vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
+ vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
+
+ previewBufSize = (size_t)
+ (yStride * yScanline + uStride * uScanline + vStride * vScanline);
+ previewBufSizeFromCallback = previewBufSize;
+ } else {
+ yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+ yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+ uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+ uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+
+ previewBufSize = (size_t)
+ ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
+
+ previewBufSizeFromCallback = (size_t)
+ ((yStride * yScanline) + (uvStride * uvScanline));
+ }
+ if(previewBufSize == previewBufSizeFromCallback) {
+ previewMem = mGetMemory(memory->getFd(idx),
+ previewBufSize, 1, mCallbackCookie);
+ if (!previewMem || !previewMem->data) {
+ LOGE("mGetMemory failed.\n");
+ return NO_MEMORY;
+ } else {
+ data = previewMem;
+ }
+ } else {
+ data = memory->getMemory(idx, false);
+ dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+ if (!dataToApp || !dataToApp->data) {
+ LOGE("mGetMemory failed.\n");
+ return NO_MEMORY;
+ }
+
+ for (i = 0; i < preview_dim.height; i++) {
+ srcOffset = i * yStride;
+ dstOffset = i * yStrideToApp;
+
+ memcpy((unsigned char *) dataToApp->data + dstOffset,
+ (unsigned char *) data->data + srcOffset,
+ (size_t)yStrideToApp);
+ }
+
+ srcBaseOffset = yStride * yScanline;
+ dstBaseOffset = yStrideToApp * yScanlineToApp;
+
+ for (i = 0; i < preview_dim.height/2; i++) {
+ srcOffset = i * uvStride + srcBaseOffset;
+ dstOffset = i * uvStrideToApp + dstBaseOffset;
+
+ memcpy((unsigned char *) dataToApp->data + dstOffset,
+ (unsigned char *) data->data + srcOffset,
+ (size_t)yStrideToApp);
+ }
+ }
+ } else {
+ /*Invalid Buffer content. But can be used as a first preview frame trigger in
+ framework/app */
+ previewBufSize = (size_t)
+ ((yStrideToApp * yScanlineToApp) +
+ (uvStrideToApp * uvScanlineToApp));
+ previewBufSizeFromCallback = 0;
+        LOGW("Invalid preview format. Buffer content cannot be processed, size = %zu",
+                previewBufSize);
+ dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+ if (!dataToApp || !dataToApp->data) {
+ LOGE("mGetMemory failed.\n");
+ return NO_MEMORY;
+ }
+ }
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+ if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
+ previewBufSize == previewBufSizeFromCallback) {
+ cbArg.data = data;
+ } else {
+ cbArg.data = dataToApp;
+ }
+ if ( previewMem ) {
+ cbArg.user_data = previewMem;
+ cbArg.release_cb = releaseCameraMemory;
+ } else if (dataToApp) {
+ cbArg.user_data = dataToApp;
+ cbArg.release_cb = releaseCameraMemory;
+ }
+ cbArg.cookie = this;
+ rc = m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGW("fail sending notification");
+ if (previewMem) {
+ previewMem->release(previewMem);
+ } else if (dataToApp) {
+ dataToApp->release(dataToApp);
+ }
+ }
+
+ return rc;
+}
+
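+/* sendPreviewCallback() above sizes the app-facing buffer as
+ * width * height * 3/2 bytes (12 bits per pixel for 4:2:0) and, when the
+ * stream uses padded strides, copies each row into a tightly packed buffer.
+ * The sketch below shows just that math and the row-by-row copy; it is
+ * illustrative only and kept out of the build. */
+#if 0
+static size_t packedYuv420SizeSketch(int32_t width, int32_t height)
+{
+    // Y plane: width*height bytes; interleaved UV plane: width*height/2 bytes
+    return (size_t)(width * height) + (size_t)(width * height / 2);
+}
+
+static void copyPlanePackedSketch(unsigned char *dst, const unsigned char *src,
+        int32_t rows, int32_t rowBytes, int32_t srcStride)
+{
+    for (int32_t i = 0; i < rows; i++) {
+        // source rows may be padded out to srcStride; destination rows are packed
+        memcpy(dst + i * rowBytes, src + i * srcStride, (size_t)rowBytes);
+    }
+}
+#endif
+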
+/*===========================================================================
+ * FUNCTION : nodisplay_preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ * no-display case
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
+ mm_camera_super_buf_t *super_frame,
+ QCameraStream *stream,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf] E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+ LOGE("preview frame is NULL");
+ free(super_frame);
+ return;
+ }
+
+ if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+ LOGH("preview is not running, no need to process");
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+ return;
+ }
+
+ if (pme->needDebugFps()) {
+ pme->debugShowPreviewFPS();
+ }
+
+ QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+ camera_memory_t *preview_mem = NULL;
+ if (previewMemObj != NULL) {
+ preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
+ }
+ if (NULL != previewMemObj && NULL != preview_mem) {
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
+ if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
+ pme->needSendPreviewCallback() &&
+ (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+ cbArg.data = preview_mem;
+ cbArg.user_data = (void *) &frame->buf_idx;
+ cbArg.cookie = stream;
+ cbArg.release_cb = returnStreamBuffer;
+ int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE ("fail sending data notify");
+ stream->bufDone(frame->buf_idx);
+ }
+ } else {
+ stream->bufDone(frame->buf_idx);
+ }
+ }
+ free(super_frame);
+ LOGH("[KPI Perf] X");
+}
+
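+/* The no-display path above delivers the frame to the app as a
+ * CAMERA_MSG_PREVIEW_FRAME data callback and relies on release_cb
+ * (returnStreamBuffer) plus the cookie/user_data pair to bufDone() the buffer
+ * back to its stream once the notifier is finished. A sketch of that argument
+ * setup, illustrative only and kept out of the build; the helper name
+ * fillPreviewCbArgSketch is assumed, and it presumes the same access to
+ * returnStreamBuffer that the real callbacks have. */
+#if 0
+static void fillPreviewCbArgSketch(qcamera_callback_argm_t &cbArg,
+        camera_memory_t *preview_mem, QCameraStream *stream,
+        uint32_t *buf_idx_ptr)
+{
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type    = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
+    cbArg.data       = preview_mem;
+    cbArg.user_data  = (void *)buf_idx_ptr;  // tells release_cb which buffer
+    cbArg.cookie     = stream;               // stream that owns the buffer
+    cbArg.release_cb = returnStreamBuffer;   // returns the buffer via bufDone()
+}
+#endif
+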
+/*===========================================================================
+ * FUNCTION : rdi_mode_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle RDI frame from preview stream in
+ * rdi mode case
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
+ mm_camera_super_buf_t *super_frame,
+ QCameraStream *stream,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("RDI_DEBUG Enter");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ free(super_frame);
+ return;
+ }
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+        LOGE("preview frame is NULL");
+ goto end;
+ }
+ if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+        LOGH("preview is not running, no need to process");
+ stream->bufDone(frame->buf_idx);
+ goto end;
+ }
+ if (pme->needDebugFps()) {
+ pme->debugShowPreviewFPS();
+ }
+ // Non-secure Mode
+ if (!pme->isSecureMode()) {
+ QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+ if (NULL == previewMemObj) {
+ LOGE("previewMemObj is NULL");
+ stream->bufDone(frame->buf_idx);
+ goto end;
+ }
+
+ camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
+ if (NULL != preview_mem) {
+ previewMemObj->cleanCache(frame->buf_idx);
+ // Dump RAW frame
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
+ // Notify Preview callback frame
+ if (pme->needProcessPreviewFrame(frame->frame_idx) &&
+ pme->mDataCb != NULL &&
+ pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+ cbArg.data = preview_mem;
+ cbArg.user_data = (void *) &frame->buf_idx;
+ cbArg.cookie = stream;
+ cbArg.release_cb = returnStreamBuffer;
+ pme->m_cbNotifier.notifyCallback(cbArg);
+ } else {
+ LOGE("preview_mem is NULL");
+ stream->bufDone(frame->buf_idx);
+ }
+ }
+ else {
+ LOGE("preview_mem is NULL");
+ stream->bufDone(frame->buf_idx);
+ }
+ } else {
+ // Secure Mode
+ // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
+ QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+ if (NULL == previewMemObj) {
+ LOGE("previewMemObj is NULL");
+ stream->bufDone(frame->buf_idx);
+ goto end;
+ }
+
+ int fd = previewMemObj->getFd(frame->buf_idx);
+ LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
+ if (pme->needProcessPreviewFrame(frame->frame_idx) &&
+ pme->mDataCb != NULL &&
+ pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+ // Prepare Callback structure
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+#ifndef VANILLA_HAL
+ cbArg.ext1 = CAMERA_FRAME_DATA_FD;
+ cbArg.ext2 = fd;
+#endif
+ cbArg.user_data = (void *) &frame->buf_idx;
+ cbArg.cookie = stream;
+ cbArg.release_cb = returnStreamBuffer;
+ pme->m_cbNotifier.notifyCallback(cbArg);
+ } else {
+ LOGH("No need to process preview frame, return buffer");
+ stream->bufDone(frame->buf_idx);
+ }
+ }
+end:
+ free(super_frame);
+ LOGH("RDI_DEBUG Exit");
+ return;
+}
+
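+/* In secure mode, rdi_mode_stream_cb_routine() above never exposes buffer
+ * contents; it only shares the buffer FD with the app through a notify
+ * callback, using ext1/ext2 when not built as VANILLA_HAL. A sketch of that
+ * argument setup, illustrative only and kept out of the build; it assumes the
+ * same access to returnStreamBuffer as the real callbacks. */
+#if 0
+static void fillSecureFrameCbArgSketch(qcamera_callback_argm_t &cbArg,
+        int fd, QCameraStream *stream, uint32_t *buf_idx_ptr)
+{
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+#ifndef VANILLA_HAL
+    cbArg.ext1 = CAMERA_FRAME_DATA_FD;  // tag: payload is a file descriptor
+    cbArg.ext2 = fd;                    // the shared buffer FD itself
+#endif
+    cbArg.user_data  = (void *)buf_idx_ptr;
+    cbArg.cookie     = stream;
+    cbArg.release_cb = returnStreamBuffer;
+}
+#endif
+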
+/*===========================================================================
+ * FUNCTION : postview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle post frame from postview stream
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+ QCameraStream *stream,
+ void *userdata)
+{
+ ATRACE_CALL();
+ int err = NO_ERROR;
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+ if (pme == NULL) {
+ LOGE("Invalid hardware object");
+ free(super_frame);
+ return;
+ }
+ if (memory == NULL) {
+ LOGE("Invalid memory object");
+ free(super_frame);
+ return;
+ }
+
+ LOGH("[KPI Perf] : BEGIN");
+
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+ LOGE("preview frame is NULL");
+ free(super_frame);
+ return;
+ }
+
+ QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
+ if (NULL != memObj) {
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+ }
+
+ // Return buffer back to driver
+ err = stream->bufDone(frame->buf_idx);
+ if ( err < 0) {
+ LOGE("stream bufDone failed %d", err);
+ }
+
+ free(super_frame);
+ LOGH("[KPI Perf] : END");
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : video_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle video frame from video stream
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done. video
+ * frame will be sent to video encoder. Once video encoder is
+ * done with the video frame, it will call another API
+ * (release_recording_frame) to return the frame back
+ *==========================================================================*/
+void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+ QCameraStream *stream,
+ void *userdata)
+{
+ ATRACE_CALL();
+ QCameraVideoMemory *videoMemObj = NULL;
+ camera_memory_t *video_mem = NULL;
+ nsecs_t timeStamp = 0;
+ bool triggerTCB = FALSE;
+
+ LOGH("[KPI Perf] : BEGIN");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+
+ if (pme->needDebugFps()) {
+ pme->debugShowVideoFPS();
+ }
+ if(pme->m_bRecordStarted) {
+ LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
+ pme->m_bRecordStarted = false ;
+ }
+ LOGD("Stream(%d), Timestamp: %ld %ld",
+ frame->stream_id,
+ frame->ts.tv_sec,
+ frame->ts.tv_nsec);
+
+ if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
+ if (pme->mParameters.getVideoBatchSize() == 0) {
+ timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+ + frame->ts.tv_nsec;
+ LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
+ timeStamp);
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
+ videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+ video_mem = NULL;
+ if (NULL != videoMemObj) {
+ video_mem = videoMemObj->getMemory(frame->buf_idx,
+ (pme->mStoreMetaDataInFrame > 0)? true : false);
+ videoMemObj->updateNativeHandle(frame->buf_idx);
+ triggerTCB = TRUE;
+ }
+ } else {
+ //Handle video batch callback
+ native_handle_t *nh = NULL;
+ pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
+ QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+ if ((stream->mCurMetaMemory == NULL)
+ || (stream->mCurBufIndex == -1)) {
+ //get Free metadata available
+ for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
+ if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
+ stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
+ stream->mCurBufIndex = 0;
+ stream->mCurMetaIndex = i;
+ stream->mStreamMetaMemory[i].numBuffers = 0;
+ break;
+ }
+ }
+ }
+ video_mem = stream->mCurMetaMemory;
+ nh = videoMemObj->updateNativeHandle(stream->mCurMetaIndex);
+ if (video_mem == NULL || nh == NULL) {
+ LOGE("No Free metadata. Drop this frame");
+ stream->mCurBufIndex = -1;
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+ return;
+ }
+
+ int index = stream->mCurBufIndex;
+ int fd_cnt = pme->mParameters.getVideoBatchSize();
+ nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+ + frame->ts.tv_nsec;
+ if (index == 0) {
+ stream->mFirstTimeStamp = frame_ts;
+ }
+
+ stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
+ = (uint8_t)frame->buf_idx;
+ stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
+ stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
+ = TRUE;
+ /*
+ * data[0] => FD
+ * data[mNumFDs + 1] => OFFSET
+ * data[mNumFDs + 2] => SIZE
+ * data[mNumFDs + 3] => Usage Flag (Color format/Compression)
+ * data[mNumFDs + 4] => TIMESTAMP
+ * data[mNumFDs + 5] => FORMAT
+ */
+ nh->data[index] = videoMemObj->getFd(frame->buf_idx);
+ nh->data[index + fd_cnt] = 0;
+ nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
+ nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
+ nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
+ nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
+ stream->mCurBufIndex++;
+ if (stream->mCurBufIndex == fd_cnt) {
+ timeStamp = stream->mFirstTimeStamp;
+ LOGD("Video frame to encoder TimeStamp : %lld batch = %d",
+ timeStamp, fd_cnt);
+ stream->mCurBufIndex = -1;
+ stream->mCurMetaIndex = -1;
+ stream->mCurMetaMemory = NULL;
+ triggerTCB = TRUE;
+ }
+ }
+ } else {
+ videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+ video_mem = NULL;
+ native_handle_t *nh = NULL;
+ int fd_cnt = frame->user_buf.bufs_used;
+ if (NULL != videoMemObj) {
+ video_mem = videoMemObj->getMemory(frame->buf_idx, true);
+ nh = videoMemObj->updateNativeHandle(frame->buf_idx);
+ } else {
+ LOGE("videoMemObj NULL");
+ }
+
+ if (nh != NULL) {
+ timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+ + frame->ts.tv_nsec;
+ LOGD("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
+ timeStamp, frame->fd, frame->buf_idx, fd_cnt);
+
+ for (int i = 0; i < fd_cnt; i++) {
+ if (frame->user_buf.buf_idx[i] >= 0) {
+ mm_camera_buf_def_t *plane_frame =
+ &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
+ QCameraVideoMemory *frameobj =
+ (QCameraVideoMemory *)plane_frame->mem_info;
+ int usage = frameobj->getUsage();
+ nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
+ + plane_frame->ts.tv_nsec;
+ /*
+ data[0] => FD
+ data[mNumFDs + 1] => OFFSET
+ data[mNumFDs + 2] => SIZE
+ data[mNumFDs + 3] => Usage Flag (Color format/Compression)
+ data[mNumFDs + 4] => TIMESTAMP
+ data[mNumFDs + 5] => FORMAT
+ */
+ nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
+ nh->data[fd_cnt + i] = 0;
+ nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
+ nh->data[(3 * fd_cnt) + i] = usage;
+ nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
+ nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
+ LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
+ (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
+ pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
+ }
+ }
+ triggerTCB = TRUE;
+ } else {
+ LOGE("No Video Meta Available. Return Buffer");
+ stream->bufDone(super_frame->bufs[0]->buf_idx);
+ }
+ }
+
+ if ((NULL != video_mem) && (triggerTCB == TRUE)) {
+ if ((pme->mDataCbTimestamp != NULL) &&
+ pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
+ cbArg.data = video_mem;
+ cbArg.timestamp = timeStamp;
+ int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
+ if (rc != NO_ERROR) {
+ LOGE("fail sending data notify");
+ stream->bufDone(frame->buf_idx);
+ }
+ }
+ }
+
+ free(super_frame);
+ LOGH("[KPI Perf] : END");
+}
+
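+/* video_stream_cb_routine() above packs batch metadata into the native handle
+ * at fixed strides of fd_cnt: the FDs come first, followed by one block of
+ * fd_cnt ints per attribute, so slot i of attribute k lives at
+ * data[k * fd_cnt + i]. A sketch of writing one slot, illustrative only and
+ * kept out of the build; the helper name packBatchSlotSketch is assumed. */
+#if 0
+static void packBatchSlotSketch(native_handle_t *nh, int fd_cnt, int i,
+        int fd, int size, int usage, int tsDelta, int format)
+{
+    nh->data[i]                = fd;       // shared buffer FD
+    nh->data[fd_cnt + i]       = 0;        // offset into the buffer
+    nh->data[(2 * fd_cnt) + i] = size;     // buffer size in bytes
+    nh->data[(3 * fd_cnt) + i] = usage;    // color format / compression usage
+    nh->data[(4 * fd_cnt) + i] = tsDelta;  // timestamp delta vs. first frame
+    nh->data[(5 * fd_cnt) + i] = format;   // pixel format
+}
+#endif
+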
+/*===========================================================================
+ * FUNCTION : snapshot_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : super_frame will be released by the caller after this call, so if
+ *              an async operation needs super_frame, it's our responsibility
+ *              to save a copy of it for later use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
+ void *userdata)
+{
+ ATRACE_CALL();
+ char value[PROPERTY_VALUE_MAX];
+ QCameraChannel *pChannel = NULL;
+
+ LOGH("[KPI Perf]: E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ if (pme->isLowPowerMode()) {
+ pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
+ } else {
+ pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+ }
+
+ if ((pChannel == NULL) || (pChannel->getMyHandle() != super_frame->ch_id)) {
+ LOGE("Snapshot channel doesn't exist, return here");
+ return;
+ }
+
+ property_get("persist.camera.dumpmetadata", value, "0");
+ int32_t enabled = atoi(value);
+ if (enabled) {
+ if (pChannel == NULL ||
+ pChannel->getMyHandle() != super_frame->ch_id) {
+ LOGE("Capture channel doesn't exist, return here");
+ return;
+ }
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = super_frame->bufs[i]; //find the metadata
+ if (pMetaFrame != NULL &&
+ ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+ pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ // save a copy for the superbuf
+ mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ pChannel->bufDone(super_frame);
+ return;
+ }
+ *frame = *super_frame;
+
+ if (frame->num_bufs > 0) {
+ LOGI("[KPI Perf]: superbuf frame_idx %d",
+ frame->bufs[0]->frame_idx);
+ }
+
+ if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+ (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+ LOGE("Failed to trigger process data");
+ pChannel->bufDone(super_frame);
+ free(frame);
+ frame = NULL;
+ return;
+ }
+
+ LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION : raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw dump frame from raw stream
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done. For raw
+ * frame, there is no need to send to postprocessor for jpeg
+ * encoding. this function will play shutter and send the data
+ * callback to upper layer. Raw frame buffer will be returned
+ * back to kernel, and frame will be free after use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * /*stream*/,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf] : BEGIN");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ pme->m_postprocessor.processRawData(super_frame);
+ LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION : raw_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle RAW superbuf callback directly from
+ * mm-camera-interface
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE       : super_frame will be released by the caller after this call, so if
+ *              an async operation needs super_frame, it's our responsibility
+ *              to save a copy of it for later use.
+*==========================================================================*/
+void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
+ void *userdata)
+{
+ ATRACE_CALL();
+ char value[PROPERTY_VALUE_MAX];
+
+ LOGH("[KPI Perf]: E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
+ if (pChannel == NULL) {
+ LOGE("RAW channel doesn't exist, return here");
+ return;
+ }
+
+ if (pChannel->getMyHandle() != super_frame->ch_id) {
+ LOGE("Invalid Input super buffer");
+ pChannel->bufDone(super_frame);
+ return;
+ }
+
+ property_get("persist.camera.dumpmetadata", value, "0");
+ int32_t enabled = atoi(value);
+ if (enabled) {
+ mm_camera_buf_def_t *pMetaFrame = NULL;
+ QCameraStream *pStream = NULL;
+ for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+ pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pMetaFrame = super_frame->bufs[i]; //find the metadata
+ if (pMetaFrame != NULL &&
+ ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+ pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ // save a copy for the superbuf
+ mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ pChannel->bufDone(super_frame);
+ return;
+ }
+ *frame = *super_frame;
+
+ if (frame->num_bufs > 0) {
+ LOGI("[KPI Perf]: superbuf frame_idx %d",
+ frame->bufs[0]->frame_idx);
+ }
+
+ // Wait on Postproc initialization if needed
+ // then send to postprocessor
+ if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+ (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+ LOGE("Failed to trigger process data");
+ pChannel->bufDone(super_frame);
+ free(frame);
+ frame = NULL;
+ return;
+ }
+
+ LOGH("[KPI Perf]: X");
+
+}
+
+/*===========================================================================
+ * FUNCTION : preview_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard preview
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * stream,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf] : BEGIN");
+ char value[PROPERTY_VALUE_MAX];
+ bool dump_preview_raw = false, dump_video_raw = false;
+
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];
+
+ if (raw_frame != NULL) {
+ property_get("persist.camera.preview_raw", value, "0");
+ dump_preview_raw = atoi(value) > 0 ? true : false;
+ property_get("persist.camera.video_raw", value, "0");
+ dump_video_raw = atoi(value) > 0 ? true : false;
+ if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
+ && dump_video_raw)) {
+ pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+ }
+ stream->bufDone(raw_frame->buf_idx);
+ }
+ free(super_frame);
+
+ LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION : snapshot_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard capture
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * stream,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf] : BEGIN");
+ char value[PROPERTY_VALUE_MAX];
+ bool dump_raw = false;
+
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ property_get("persist.camera.snapshot_raw", value, "0");
+ dump_raw = atoi(value) > 0 ? true : false;
+
+ for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+ if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+ mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
+ if (NULL != stream) {
+ if (dump_raw) {
+ pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+ }
+ stream->bufDone(super_frame->bufs[i]->buf_idx);
+ }
+ break;
+ }
+ }
+
+ free(super_frame);
+
+ LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION : updateMetadata
+ *
+ * DESCRIPTION: Frame-related parameters can be updated here
+ *
+ * PARAMETERS :
+ * @pMetaData : pointer to metadata buffer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
+{
+ int32_t rc = NO_ERROR;
+
+ if (pMetaData == NULL) {
+ LOGE("Null Metadata buffer");
+ return rc;
+ }
+
+ // Sharpness
+ cam_edge_application_t edge_application;
+ memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
+ edge_application.sharpness = mParameters.getSharpness();
+ if (edge_application.sharpness != 0) {
+ edge_application.edge_mode = CAM_EDGE_MODE_FAST;
+ } else {
+ edge_application.edge_mode = CAM_EDGE_MODE_OFF;
+ }
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+ CAM_INTF_META_EDGE_MODE, edge_application);
+
+ //Effect
+ int32_t prmEffect = mParameters.getEffect();
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
+
+ //flip
+ int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
+
+ //denoise
+ uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+ CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
+
+ //rotation & device rotation
+ uint32_t prmRotation = mParameters.getJpegRotation();
+ cam_rotation_info_t rotation_info;
+ memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
+ if (prmRotation == 0) {
+ rotation_info.rotation = ROTATE_0;
+ } else if (prmRotation == 90) {
+ rotation_info.rotation = ROTATE_90;
+ } else if (prmRotation == 180) {
+ rotation_info.rotation = ROTATE_180;
+ } else if (prmRotation == 270) {
+ rotation_info.rotation = ROTATE_270;
+ }
+
+ uint32_t device_rotation = mParameters.getDeviceRotation();
+ if (device_rotation == 0) {
+ rotation_info.device_rotation = ROTATE_0;
+ } else if (device_rotation == 90) {
+ rotation_info.device_rotation = ROTATE_90;
+ } else if (device_rotation == 180) {
+ rotation_info.device_rotation = ROTATE_180;
+ } else if (device_rotation == 270) {
+ rotation_info.device_rotation = ROTATE_270;
+ } else {
+ rotation_info.device_rotation = ROTATE_0;
+ }
+
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
+
+ // Imglib Dynamic Scene Data
+ cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
+ if (mParameters.isStillMoreEnabled()) {
+ cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
+ dyn_img_data.input_count = stillmore_cap.burst_count;
+ }
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+ CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
+
+ //CPP CDS
+ int32_t prmCDSMode = mParameters.getCDSMode();
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+ CAM_INTF_PARM_CDS_MODE, prmCDSMode);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : metadata_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle metadata frame from metadata stream
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done. Metadata
+ * could have valid entries for face detection result or
+ * histogram statistics information.
+ *==========================================================================*/
+void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * stream,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGD("[KPI Perf] : BEGIN");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
+ if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
+ !pme->mLongshotEnabled) {
+ //Make shutter call back in non ZSL mode once raw frame is received from VFE.
+ pme->playShutter();
+ }
+
+ if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
+ //Dump Tuning data for video
+ pme->dumpMetadataToFile(stream,frame,(char *)"Video");
+ }
+
+ IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
+ // process histogram statistics info
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
+ payload->stats_data = *stats_data;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt histogram failed");
+ free(payload);
+ payload = NULL;
+
+ }
+ } else {
+ LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
+ CAM_INTF_META_FACE_DETECTION, pMetaData) {
+
+ cam_faces_data_t faces_data;
+ pme->fillFacesData(faces_data, pMetaData);
+ faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
+
+ qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+ payload->faces_data = faces_data;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt face detection failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
+ uint8_t forceAFUpdate = FALSE;
+ //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
+ //upper layers. But in scenarios where metadata drops especially which contain important
+ //AF information, APP will wait indefinitely for focus result resulting in capture hang.
+ //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
+ //This will help overcome metadata drop issue with the earlier approach.
+ //3. But sometimes AF state transitions can happen so fast within same metadata due to
+ //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
+ //state depending on the state transitions happened (for example state A -> B -> A).
+ //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
+ //we check state transition at both HAL level and AF module level. We rely on
+ //'state transition' meta field set by AF module for the state transition detected by it.
+ IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
+ forceAFUpdate = *stateChange;
+ }
+ //This is a special scenario: when scene modes like landscape are selected, the AF mode
+ //gets changed to INFINITY at the backend, but HAL will not be aware of it. Also, AF state in
+ //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
+ //change here and trigger AF callback @ processAutoFocusEvent().
+ IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
+ if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
+ pme->mActiveAF){
+ forceAFUpdate = TRUE;
+ }
+ }
+ if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
+ cam_af_state_t prevFocusState = pme->m_currentFocusState;
+ pme->m_currentFocusState = (cam_af_state_t)(*afState);
+ qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
+ payload->focus_data.focus_state = (cam_af_state_t)(*afState);
+ //Need to flush ZSL Q only if we are transitioning from scanning state
+ //to focused/not focused state.
+ payload->focus_data.flush_info.needFlush =
+ ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
+ (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
+ ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
+ (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
+ payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
+
+ IF_META_AVAILABLE(float, focusDistance,
+ CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
+ payload->focus_data.focus_dist.
+ focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
+ }
+ IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
+ payload->focus_data.focus_dist.
+ focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
+ payload->focus_data.focus_dist.
+ focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
+ }
+ IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
+ payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
+ }
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt focus failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
+ }
+ }
+ }
+
+ IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
+ if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
+ LOGE("Invalid num_of_streams %d in crop_data",
+ crop_data->num_of_streams);
+ } else {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
+ payload->crop_data = *crop_data;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGE("processEvt crop info failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
+ }
+ }
+ }
+
+ IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
+ CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
+ payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt prep_snapshot failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
+ CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
+ LOGH("hdr_scene_data: %d %f\n",
+ hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
+ //Handle this HDR meta data only if capture is not in process
+ if (!pme->m_stateMachine.isCaptureRunning()) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
+ payload->hdr_data = *hdr_scene_data;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt hdr update failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
+ }
+ }
+ }
+
+ IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
+ CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
+ payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt asd_update failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
+ LOGH(", metadata for awb params.");
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
+ payload->awb_data = *awb_params;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt awb_update failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
+ pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
+ }
+
+ IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
+ pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
+ }
+
+ IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
+ pme->mExifParams.sensor_params.aperture_value = *aperture_value;
+ }
+
+ IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
+ pme->mExifParams.cam_3a_params = *ae_params;
+ pme->mExifParams.cam_3a_params_valid = TRUE;
+ pme->mFlashNeeded = ae_params->flash_needed;
+ pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
+ payload->ae_data = *ae_params;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt ae_update failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
+ pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
+ }
+
+ IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
+ pme->mExifParams.sensor_params = *sensor_params;
+ }
+
+ IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
+ pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
+ pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
+ pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
+ pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
+ pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
+ pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
+ pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
+ CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
+ if (pme->mExifParams.debug_params) {
+ pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
+ pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
+ }
+ }
+
+ IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)
+ malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
+ payload->led_data = (cam_flash_mode_t)*led_mode;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt led mode override failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
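+ // Mirror the current sharpness setting into this metadata buffer as an edge-mode
+ // entry (same sharpness -> edge-mode mapping as in updateMetadata()).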
+ cam_edge_application_t edge_application;
+ memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
+ edge_application.sharpness = pme->mParameters.getSharpness();
+ if (edge_application.sharpness != 0) {
+ edge_application.edge_mode = CAM_EDGE_MODE_FAST;
+ } else {
+ edge_application.edge_mode = CAM_EDGE_MODE_OFF;
+ }
+ ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
+
+ IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
+ CAM_INTF_META_FOCUS_POSITION, pMetaData) {
+ qcamera_sm_internal_evt_payload_t *payload =
+ (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+ if (NULL != payload) {
+ memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+ payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
+ payload->focus_pos = *cur_pos_info;
+ int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+ if (rc != NO_ERROR) {
+ LOGW("processEvt focus_pos_update failed");
+ free(payload);
+ payload = NULL;
+ }
+ } else {
+ LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
+ }
+ }
+
+ if (pme->mParameters.getLowLightCapture()) {
+ IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
+ CAM_INTF_META_LOW_LIGHT, pMetaData) {
+ pme->mParameters.setLowLightLevel(*low_light_level);
+ }
+ }
+
+ IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
+ CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
+ pme->mParameters.setDynamicImgData(*dyn_img_data);
+ }
+
+ IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
+ LOGD("touch_ae_status: %d", *touch_ae_status);
+ }
+
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+
+ LOGD("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION : reprocess_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
+ *              (after reprocess, e.g., ZSL snapshot frame after WNR if
+ * WNR is enabled)
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *
+ * NOTE : caller passes the ownership of super_frame, it's our
+ * responsibility to free super_frame once it's done. In this
+ * case, reprocessed frame need to be passed to postprocessor
+ * for jpeg encoding.
+ *==========================================================================*/
+void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+ QCameraStream * /*stream*/,
+ void * userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf]: E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ pme->m_postprocessor.processPPData(super_frame);
+
+ LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION : callback_stream_cb_routine
+ *
+ * DESCRIPTION: function to process CALLBACK stream data.
+ *              Frames are processed and sent to the framework
+ *
+ * PARAMETERS :
+ * @super_frame : received super buffer
+ * @stream : stream object
+ * @userdata : user data ptr
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+ QCameraStream *stream, void *userdata)
+{
+ ATRACE_CALL();
+ LOGH("[KPI Perf]: E");
+ QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+
+ if (pme == NULL ||
+ pme->mCameraHandle == NULL ||
+ pme->mCameraHandle->camera_handle != super_frame->camera_handle) {
+ LOGE("camera obj not valid");
+ // simply free super frame
+ free(super_frame);
+ return;
+ }
+
+ mm_camera_buf_def_t *frame = super_frame->bufs[0];
+ if (NULL == frame) {
+ LOGE("preview callback frame is NULL");
+ free(super_frame);
+ return;
+ }
+
+ if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+ LOGH("preview is not running, no need to process");
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+ return;
+ }
+
+ QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+ // Handle preview data callback
+ if (pme->mDataCb != NULL &&
+ (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
+ (!pme->mParameters.isSceneSelectionEnabled())) {
+ int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx);
+ if (NO_ERROR != rc) {
+ LOGE("Preview callback was not sent succesfully");
+ }
+ }
+ stream->bufDone(frame->buf_idx);
+ free(super_frame);
+ LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION : dumpJpegToFile
+ *
+ * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
+ *
+ * PARAMETERS :
+ * @data : data ptr
+ * @size : length of data buffer
+ * @index : identifier for data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
+ size_t size, uint32_t index)
+{
+ char value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.dumpimg", value, "0");
+ uint32_t enabled = (uint32_t) atoi(value);
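+ // Note: the persist.camera.dumpimg value packs several fields, as parsed below:
+ // bits[31:16] = number of frames to dump (0 -> default 10, 256 -> cycle around),
+ // bits[15:8] = skip interval (0 -> no skip), bits[7:0] = dump-type mask
+ // (QCAMERA_DUMP_FRM_*). dumpFrameToFile() uses the same encoding.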
+ uint32_t frm_num = 0;
+ uint32_t skip_mode = 0;
+
+ char buf[32];
+ cam_dimension_t dim;
+ memset(buf, 0, sizeof(buf));
+ memset(&dim, 0, sizeof(dim));
+
+ if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
+ ((true == m_bIntJpegEvtPending) && data)) {
+ frm_num = ((enabled & 0xffff0000) >> 16);
+ if(frm_num == 0) {
+ frm_num = 10; //default 10 frames
+ }
+ if(frm_num > 256) {
+ frm_num = 256; //256 buffers cycle around
+ }
+ skip_mode = ((enabled & 0x0000ff00) >> 8);
+ if(skip_mode == 0) {
+ skip_mode = 1; //no-skip
+ }
+
+ if( mDumpSkipCnt % skip_mode == 0) {
+ if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
+ // reset frame count if cycling
+ mDumpFrmCnt = 0;
+ }
+ if (mDumpFrmCnt <= frm_num) {
+ snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
+ mDumpFrmCnt, index);
+ if (true == m_bIntJpegEvtPending) {
+ strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
+ mBackendFileSize = size;
+ }
+
+ int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ if (file_fd >= 0) {
+ ssize_t written_len = write(file_fd, data, size);
+ fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+ LOGH("written number of bytes %zd\n",
+ written_len);
+ close(file_fd);
+ } else {
+ LOGE("fail to open file for image dumping");
+ }
+ if (false == m_bIntJpegEvtPending) {
+ mDumpFrmCnt++;
+ }
+ }
+ }
+ mDumpSkipCnt++;
+ }
+}
+
+
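+/*===========================================================================
+ * FUNCTION : dumpMetadataToFile
+ *
+ * DESCRIPTION: helper function to dump tuning metadata into a file for debug
+ *              purpose, gated by the persist.camera.dumpmetadata property.
+ *
+ * PARAMETERS :
+ * @stream : stream object the metadata belongs to
+ * @frame : metadata frame buffer
+ * @type : tag used in the dump file name (e.g. "raw", "Video")
+ *
+ * RETURN : None
+ *==========================================================================*/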
+void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
+ mm_camera_buf_def_t *frame,char *type)
+{
+ char value[PROPERTY_VALUE_MAX];
+ uint32_t frm_num = 0;
+ metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
+ property_get("persist.camera.dumpmetadata", value, "0");
+ uint32_t enabled = (uint32_t) atoi(value);
+ if (stream == NULL) {
+ LOGH("No op");
+ return;
+ }
+
+ uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
+ if(enabled){
+ frm_num = ((enabled & 0xffff0000) >> 16);
+ if (frm_num == 0) {
+ frm_num = 10; //default 10 frames
+ }
+ if (frm_num > 256) {
+ frm_num = 256; //256 buffers cycle around
+ }
+ if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
+ // reset frame count if cycling
+ dumpFrmCnt = 0;
+ }
+ LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
+ if (dumpFrmCnt < frm_num) {
+ char timeBuf[128];
+ char buf[32];
+ memset(buf, 0, sizeof(buf));
+ memset(timeBuf, 0, sizeof(timeBuf));
+ time_t current_time;
+ struct tm * timeinfo;
+ time (&current_time);
+ timeinfo = localtime (&current_time);
+ if (NULL != timeinfo) {
+ strftime(timeBuf, sizeof(timeBuf),
+ QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
+ }
+ String8 filePath(timeBuf);
+ snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
+ filePath.append(buf);
+ int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+ if (file_fd >= 0) {
+ ssize_t written_len = 0;
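+ // Dump layout (mirrors the writes below): a header of six uint32_t values
+ // (tuning data version followed by the sensor/VFE/CPP/CAC/CAC2 section sizes),
+ // then the sensor, VFE, CPP and CAC sections copied from tuning_params.data
+ // at their fixed TUNING_*_DATA_OFFSET positions.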
+ metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
+ void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
+ LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
+ LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
+ LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
+ LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
+ LOGH("< skrajago >tuning_cac_data_size %d",(int)(*(int *)data));
+ written_len += write(file_fd, data, sizeof(uint32_t));
+ size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
+ data = (void *)((uint8_t *)&metadata->tuning_params.data);
+ written_len += write(file_fd, data, total_size);
+ total_size = metadata->tuning_params.tuning_vfe_data_size;
+ data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
+ written_len += write(file_fd, data, total_size);
+ total_size = metadata->tuning_params.tuning_cpp_data_size;
+ data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
+ written_len += write(file_fd, data, total_size);
+ total_size = metadata->tuning_params.tuning_cac_data_size;
+ data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
+ written_len += write(file_fd, data, total_size);
+ close(file_fd);
+ } else {
+ LOGE("fail to open file for image dumping");
+ }
+ dumpFrmCnt++;
+ }
+ }
+ stream->mDumpMetaFrame = dumpFrmCnt;
+}
+/*===========================================================================
+ * FUNCTION : dumpFrameToFile
+ *
+ * DESCRIPTION: helper function to dump frame into file for debug purpose.
+ *
+ * PARAMETERS :
+ * @data : data ptr
+ * @size : length of data buffer
+ * @index : identifier for data
+ * @dump_type : type of the frame to be dumped. Only such
+ * dump type is enabled, the frame will be
+ * dumped into a file.
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
+ mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
+{
+ char value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.dumpimg", value, "0");
+ uint32_t enabled = (uint32_t) atoi(value);
+ uint32_t frm_num = 0;
+ uint32_t skip_mode = 0;
+
+ if (NULL == stream) {
+ LOGE("stream object is null");
+ return;
+ }
+
+ uint32_t dumpFrmCnt = stream->mDumpFrame;
+
+ if (true == m_bIntRawEvtPending) {
+ enabled = QCAMERA_DUMP_FRM_RAW;
+ }
+
+ if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
+ if((enabled & dump_type) && stream && frame) {
+ frm_num = ((enabled & 0xffff0000) >> 16);
+ if(frm_num == 0) {
+ frm_num = 10; //default 10 frames
+ }
+ if(frm_num > 256) {
+ frm_num = 256; //256 buffers cycle around
+ }
+ skip_mode = ((enabled & 0x0000ff00) >> 8);
+ if(skip_mode == 0) {
+ skip_mode = 1; //no-skip
+ }
+ if(stream->mDumpSkipCnt == 0)
+ stream->mDumpSkipCnt = 1;
+
+ if( stream->mDumpSkipCnt % skip_mode == 0) {
+ if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
+ // reset frame count if cycling
+ dumpFrmCnt = 0;
+ }
+ if (dumpFrmCnt <= frm_num) {
+ char buf[32];
+ char timeBuf[128];
+ time_t current_time;
+ struct tm * timeinfo;
+
+ memset(timeBuf, 0, sizeof(timeBuf));
+
+ time (&current_time);
+ timeinfo = localtime (&current_time);
+ memset(buf, 0, sizeof(buf));
+
+ cam_dimension_t dim;
+ memset(&dim, 0, sizeof(dim));
+ stream->getFrameDimension(dim);
+
+ cam_frame_len_offset_t offset;
+ memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+ stream->getFrameOffset(offset);
+
+ if (NULL != timeinfo) {
+ strftime(timeBuf, sizeof(timeBuf),
+ QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
+ }
+ String8 filePath(timeBuf);
+ switch (dump_type) {
+ case QCAMERA_DUMP_FRM_PREVIEW:
+ {
+ snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ break;
+ case QCAMERA_DUMP_FRM_THUMBNAIL:
+ {
+ snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ break;
+ case QCAMERA_DUMP_FRM_SNAPSHOT:
+ {
+ if (!mParameters.isPostProcScaling()) {
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+ } else {
+ stream->getFrameDimension(dim);
+ }
+ if (misc != NULL) {
+ snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
+ } else {
+ snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ }
+ break;
+ case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
+ {
+ stream->getFrameDimension(dim);
+ if (misc != NULL) {
+ snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
+ } else {
+ snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ }
+ break;
+ case QCAMERA_DUMP_FRM_VIDEO:
+ {
+ snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ break;
+ case QCAMERA_DUMP_FRM_RAW:
+ {
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
+ snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ break;
+ case QCAMERA_DUMP_FRM_JPEG:
+ {
+ mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+ snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
+ dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+ }
+ break;
+ default:
+ LOGE("Not supported for dumping stream type %d",
+ dump_type);
+ return;
+ }
+
+ filePath.append(buf);
+ int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+ ssize_t written_len = 0;
+ if (file_fd >= 0) {
+ void *data = NULL;
+
+ fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
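+ // Write the frame plane by plane: optional per-plane meta bytes first,
+ // then 'height' rows of 'width' valid bytes each, advancing by 'stride'
+ // so that padding bytes are skipped.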
+ for (uint32_t i = 0; i < offset.num_planes; i++) {
+ uint32_t index = offset.mp[i].offset;
+ if (i > 0) {
+ index += offset.mp[i-1].len;
+ }
+
+ if (offset.mp[i].meta_len != 0) {
+ data = (void *)((uint8_t *)frame->buffer + index);
+ written_len += write(file_fd, data,
+ (size_t)offset.mp[i].meta_len);
+ index += (uint32_t)offset.mp[i].meta_len;
+ }
+
+ for (int j = 0; j < offset.mp[i].height; j++) {
+ data = (void *)((uint8_t *)frame->buffer + index);
+ written_len += write(file_fd, data,
+ (size_t)offset.mp[i].width);
+ index += (uint32_t)offset.mp[i].stride;
+ }
+ }
+
+ LOGH("written number of bytes %ld\n",
+ written_len);
+ close(file_fd);
+ } else {
+ LOGE("fail to open file for image dumping");
+ }
+ if (true == m_bIntRawEvtPending) {
+ strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
+ mBackendFileSize = (size_t)written_len;
+ } else {
+ dumpFrmCnt++;
+ }
+ }
+ }
+ stream->mDumpSkipCnt++;
+ }
+ } else {
+ dumpFrmCnt = 0;
+ }
+ stream->mDumpFrame = dumpFrmCnt;
+}
+
+/*===========================================================================
+ * FUNCTION : debugShowVideoFPS
+ *
+ * DESCRIPTION: helper function to log video frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowVideoFPS()
+{
+ mVFrameCount++;
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mVLastFpsTime;
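+ // Recompute roughly every 250 ms: FPS = frames received in the window
+ // divided by the elapsed time (converted from ns to seconds).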
+ if (diff > ms2ns(250)) {
+ mVFps = (((double)(mVFrameCount - mVLastFrameCount)) *
+ (double)(s2ns(1))) / (double)diff;
+ LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
+ mVFps, mCameraId);
+ mVLastFpsTime = now;
+ mVLastFrameCount = mVFrameCount;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : debugShowPreviewFPS
+ *
+ * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowPreviewFPS()
+{
+ mPFrameCount++;
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mPLastFpsTime;
+ if (diff > ms2ns(250)) {
+ mPFps = (((double)(mPFrameCount - mPLastFrameCount)) *
+ (double)(s2ns(1))) / (double)diff;
+ LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
+ mPFps, mCameraId);
+ mPLastFpsTime = now;
+ mPLastFrameCount = mPFrameCount;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : fillFacesData
+ *
+ * DESCRIPTION: helper function to fill in face related metadata into a struct.
+ *
+ * PARAMETERS :
+ * @faces_data : face features data to be filled
+ * @metadata : metadata structure to read face features from
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
+ metadata_buffer_t *metadata)
+{
+ memset(&faces_data, 0, sizeof(cam_faces_data_t));
+
+ IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
+ CAM_INTF_META_FACE_DETECTION, metadata) {
+ faces_data.detection_data = *p_detection_data;
+ if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
+ faces_data.detection_data.num_faces_detected = MAX_ROI;
+ }
+
+ LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
+ faces_data.detection_data.num_faces_detected);
+
+ IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
+ CAM_INTF_META_FACE_RECOG, metadata) {
+ faces_data.recog_valid = true;
+ faces_data.recog_data = *p_recog_data;
+ }
+
+ IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
+ CAM_INTF_META_FACE_BLINK, metadata) {
+ faces_data.blink_valid = true;
+ faces_data.blink_data = *p_blink_data;
+ }
+
+ IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
+ CAM_INTF_META_FACE_GAZE, metadata) {
+ faces_data.gaze_valid = true;
+ faces_data.gaze_data = *p_gaze_data;
+ }
+
+ IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
+ CAM_INTF_META_FACE_SMILE, metadata) {
+ faces_data.smile_valid = true;
+ faces_data.smile_data = *p_smile_data;
+ }
+
+ IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
+ CAM_INTF_META_FACE_LANDMARK, metadata) {
+ faces_data.landmark_valid = true;
+ faces_data.landmark_data = *p_landmarks;
+ }
+
+ IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
+ CAM_INTF_META_FACE_CONTOUR, metadata) {
+ faces_data.contour_valid = true;
+ faces_data.contour_data = *p_contour;
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraCbNotifier
+ *
+ * DESCRIPTION: Destructor for exiting the callback context.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraCbNotifier::~QCameraCbNotifier()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : exit
+ *
+ * DESCRIPTION: exit notify thread.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraCbNotifier::exit()
+{
+ mActive = false;
+ mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION : releaseNotifications
+ *
+ * DESCRIPTION: callback for releasing data stored in the callback queue.
+ *
+ * PARAMETERS :
+ * @data : data to be released
+ * @user_data : context data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
+{
+ qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+
+ if ( ( NULL != arg ) && ( NULL != user_data ) ) {
+ if ( arg->release_cb ) {
+ arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : matchSnapshotNotifications
+ *
+ * DESCRIPTION: matches snapshot data callbacks
+ *
+ * PARAMETERS :
+ * @data : data to match
+ * @user_data : context data
+ *
+ * RETURN : bool match
+ * true - match found
+ * false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
+ void */*user_data*/)
+{
+ qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+ if ( NULL != arg ) {
+ if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : matchPreviewNotifications
+ *
+ * DESCRIPTION: matches preview data callbacks
+ *
+ * PARAMETERS :
+ * @data : data to match
+ * @user_data : context data
+ *
+ * RETURN : bool match
+ * true - match found
+ * false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchPreviewNotifications(void *data,
+ void */*user_data*/)
+{
+ qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+ if (NULL != arg) {
+ if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
+ (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : matchTimestampNotifications
+ *
+ * DESCRIPTION: matches timestamp data callbacks
+ *
+ * PARAMETERS :
+ * @data : data to match
+ * @user_data : context data
+ *
+ * RETURN : bool match
+ * true - match found
+ * false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchTimestampNotifications(void *data,
+ void */*user_data*/)
+{
+ qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+ if (NULL != arg) {
+ if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
+ (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : cbNotifyRoutine
+ *
+ * DESCRIPTION: callback thread which interfaces with the upper layers
+ * given input commands.
+ *
+ * PARAMETERS :
+ * @data : context data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void * QCameraCbNotifier::cbNotifyRoutine(void * data)
+{
+ int running = 1;
+ int ret;
+ QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
+ QCameraCmdThread *cmdThread = &pme->mProcTh;
+ cmdThread->setName("CAM_cbNotify");
+ uint8_t isSnapshotActive = FALSE;
+ bool longShotEnabled = false;
+ uint32_t numOfSnapshotExpected = 0;
+ uint32_t numOfSnapshotRcvd = 0;
+ int32_t cbStatus = NO_ERROR;
+
+ LOGD("E");
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGD("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ LOGD("get cmd %d", cmd);
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_START_DATA_PROC:
+ {
+ isSnapshotActive = TRUE;
+ numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
+ longShotEnabled = pme->mParent->isLongshotEnabled();
+ LOGD("Num Snapshots Expected = %d",
+ numOfSnapshotExpected);
+ numOfSnapshotRcvd = 0;
+ }
+ break;
+ case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+ {
+ pme->mDataQ.flushNodes(matchSnapshotNotifications);
+ isSnapshotActive = FALSE;
+
+ numOfSnapshotExpected = 0;
+ numOfSnapshotRcvd = 0;
+ }
+ break;
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ qcamera_callback_argm_t *cb =
+ (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
+ cbStatus = NO_ERROR;
+ if (NULL != cb) {
+ LOGD("cb type %d received",
+ cb->cb_type);
+
+ if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
+ switch (cb->cb_type) {
+ case QCAMERA_NOTIFY_CALLBACK:
+ {
+ if (cb->msg_type == CAMERA_MSG_FOCUS) {
+ KPI_ATRACE_INT("Camera:AutoFocus", 0);
+ LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO APP");
+ }
+ if (pme->mNotifyCb) {
+ pme->mNotifyCb(cb->msg_type,
+ cb->ext1,
+ cb->ext2,
+ pme->mCallbackCookie);
+ } else {
+ LOGW("notify callback not set!");
+ }
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie,
+ cbStatus);
+ }
+ }
+ break;
+ case QCAMERA_DATA_CALLBACK:
+ {
+ if (pme->mDataCb) {
+ pme->mDataCb(cb->msg_type,
+ cb->data,
+ cb->index,
+ cb->metadata,
+ pme->mCallbackCookie);
+ } else {
+ LOGW("data callback not set!");
+ }
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie,
+ cbStatus);
+ }
+ }
+ break;
+ case QCAMERA_DATA_TIMESTAMP_CALLBACK:
+ {
+ if(pme->mDataCbTimestamp) {
+ pme->mDataCbTimestamp(cb->timestamp,
+ cb->msg_type,
+ cb->data,
+ cb->index,
+ pme->mCallbackCookie);
+ } else {
+ LOGE("Timestamp data callback not set!");
+ }
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie,
+ cbStatus);
+ }
+ }
+ break;
+ case QCAMERA_DATA_SNAPSHOT_CALLBACK:
+ {
+ if (TRUE == isSnapshotActive && pme->mDataCb ) {
+ if (!longShotEnabled) {
+ numOfSnapshotRcvd++;
+ LOGI("Num Snapshots Received = %d Expected = %d",
+ numOfSnapshotRcvd, numOfSnapshotExpected);
+ if (numOfSnapshotExpected > 0 &&
+ (numOfSnapshotExpected == numOfSnapshotRcvd)) {
+ LOGI("Received all snapshots");
+ // notify HWI that snapshot is done
+ pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
+ NULL);
+ }
+ }
+ if (pme->mJpegCb) {
+ LOGI("Calling JPEG Callback!! for camera %d"
+ "release_data %p",
+ "frame_idx %d",
+ pme->mParent->getCameraId(),
+ cb->user_data,
+ cb->frame_index);
+ pme->mJpegCb(cb->msg_type, cb->data,
+ cb->index, cb->metadata,
+ pme->mJpegCallbackCookie,
+ cb->frame_index, cb->release_cb,
+ cb->cookie, cb->user_data);
+ // In case of a non-null JPEG cb we transfer
+ // ownership of the buffer to the muxer, hence
+ // release_cb should not be called here; the
+ // muxer will release it once it is done
+ // processing the buffer.
+ } else if(pme->mDataCb){
+ pme->mDataCb(cb->msg_type, cb->data, cb->index,
+ cb->metadata, pme->mCallbackCookie);
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie,
+ cbStatus);
+ }
+ }
+ }
+ }
+ break;
+ default:
+ {
+ LOGE("invalid cb type %d",
+ cb->cb_type);
+ cbStatus = BAD_VALUE;
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie,
+ cbStatus);
+ }
+ }
+ break;
+ };
+ } else {
+ LOGW("cb message type %d not enabled!",
+ cb->msg_type);
+ cbStatus = INVALID_OPERATION;
+ if (cb->release_cb) {
+ cb->release_cb(cb->user_data, cb->cookie, cbStatus);
+ }
+ }
+ delete cb;
+ } else {
+ LOGW("invalid cb type passed");
+ }
+ }
+ break;
+ case CAMERA_CMD_TYPE_EXIT:
+ {
+ running = 0;
+ pme->mDataQ.flush();
+ }
+ break;
+ default:
+ break;
+ }
+ } while (running);
+ LOGD("X");
+
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : notifyCallback
+ *
+ * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
+ *
+ * PARAMETERS :
+ * @cbArgs : callback arguments
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
+{
+ if (!mActive) {
+ LOGE("notify thread is not active");
+ return UNKNOWN_ERROR;
+ }
+
+ qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
+ if (NULL == cbArg) {
+ LOGE("no mem for qcamera_callback_argm_t");
+ return NO_MEMORY;
+ }
+ memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
+ *cbArg = cbArgs;
+
+ if (mDataQ.enqueue((void *)cbArg)) {
+ return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ } else {
+ LOGE("Error adding cb data into queue");
+ delete cbArg;
+ return UNKNOWN_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setCallbacks
+ *
+ * DESCRIPTION: Initializes the callback functions, which would be used for
+ * communication with the upper layers and launches the callback
+ * context in which the callbacks will occur.
+ *
+ * PARAMETERS :
+ * @notifyCb : notification callback
+ * @dataCb : data callback
+ * @dataCbTimestamp : data with timestamp callback
+ * @callbackCookie : callback context data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
+ camera_data_callback dataCb,
+ camera_data_timestamp_callback dataCbTimestamp,
+ void *callbackCookie)
+{
+ if ( ( NULL == mNotifyCb ) &&
+ ( NULL == mDataCb ) &&
+ ( NULL == mDataCbTimestamp ) &&
+ ( NULL == mCallbackCookie ) ) {
+ mNotifyCb = notifyCb;
+ mDataCb = dataCb;
+ mDataCbTimestamp = dataCbTimestamp;
+ mCallbackCookie = callbackCookie;
+ mActive = true;
+ mProcTh.launch(cbNotifyRoutine, this);
+ } else {
+ LOGE("Camera callback notifier already initialized!");
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegCallBacks
+ *
+ * DESCRIPTION: Initializes the JPEG callback function, which would be used for
+ * communication with the upper layers and launches the callback
+ * context in which the callbacks will occur.
+ *
+ * PARAMETERS :
+ * @jpegCb : notification callback
+ * @callbackCookie : callback context data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraCbNotifier::setJpegCallBacks(
+ jpeg_data_callback jpegCb, void *callbackCookie)
+{
+ LOGH("Setting JPEG Callback notifier");
+ mJpegCb = jpegCb;
+ mJpegCallbackCookie = callbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION : flushPreviewNotifications
+ *
+ * DESCRIPTION: flush all pending preview notifications
+ * from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushPreviewNotifications()
+{
+ if (!mActive) {
+ LOGE("notify thread is not active");
+ return UNKNOWN_ERROR;
+ }
+ mDataQ.flushNodes(matchPreviewNotifications);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : flushVideoNotifications
+ *
+ * DESCRIPTION: flush all pending video notifications
+ * from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushVideoNotifications()
+{
+ if (!mActive) {
+ LOGE("notify thread is not active");
+ return UNKNOWN_ERROR;
+ }
+ mDataQ.flushNodes(matchTimestampNotifications);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : startSnapshots
+ *
+ * DESCRIPTION: Enables snapshot mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::startSnapshots()
+{
+ return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
+}
+
+/*===========================================================================
+ * FUNCTION : stopSnapshots
+ *
+ * DESCRIPTION: Disables snapshot processing mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraCbNotifier::stopSnapshots()
+{
+ mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraAllocator.h b/camera/QCamera2/HAL/QCameraAllocator.h
new file mode 100644
index 0000000..ca15a6a
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraAllocator.h
@@ -0,0 +1,63 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_ALLOCATOR__
+#define __QCAMERA_ALLOCATOR__
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraMemory;
+class QCameraHeapMemory;
+
+typedef struct {
+ int32_t (*bgFunction) (void *);
+ void* bgArgs;
+} BackgroundTask;
+
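+/* Abstract buffer-allocator interface used by channel/stream objects to obtain
+ * stream buffers, stream-info buffers and misc buffers, and to schedule
+ * deferred/background allocation work. In this HAL the hardware interface
+ * object is expected to provide the concrete implementation. */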
+class QCameraAllocator {
+public:
+ virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+ size_t size, int stride, int scanline, uint8_t &bufferCnt) = 0;
+ virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+ size_t size, uint8_t &bufferCnt) = 0;
+ virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type) = 0;
+ virtual QCameraHeapMemory *allocateMiscBuf(cam_stream_info_t *streamInfo) = 0;
+ virtual QCameraMemory *allocateStreamUserBuf(cam_stream_info_t *streamInfo) = 0;
+ virtual void waitForDeferredAlloc(cam_stream_type_t stream_type) = 0;
+ virtual uint32_t scheduleBackgroundTask(BackgroundTask* bgTask) = 0;
+ virtual int32_t waitForBackgroundTask(uint32_t &taskId) = 0;
+ virtual ~QCameraAllocator() {}
+};
+
+}; /* namespace qcamera */
+#endif /* __QCAMERA_ALLOCATOR__ */
diff --git a/camera/QCamera2/HAL/QCameraChannel.cpp b/camera/QCamera2/HAL/QCameraChannel.cpp
new file mode 100644
index 0000000..73378d0
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraChannel.cpp
@@ -0,0 +1,1601 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraChannel"
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION : QCameraChannel
+ *
+ * DESCRIPTION: constructor of QCameraChannel
+ *
+ * PARAMETERS :
+ * @cam_handle : camera handle
+ * @cam_ops : ptr to camera ops table
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops)
+{
+ m_camHandle = cam_handle;
+ m_camOps = cam_ops;
+ m_bIsActive = false;
+ m_bAllowDynBufAlloc = false;
+
+ m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraChannel
+ *
+ * DESCRIPTION: default constructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel()
+{
+ m_camHandle = 0;
+ m_camOps = NULL;
+ m_bIsActive = false;
+
+ m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraChannel
+ *
+ * DESCRIPTION: destructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraChannel::~QCameraChannel()
+{
+ if (m_bIsActive) {
+ stop();
+ }
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL) {
+ if (m_handle == mStreams[i]->getChannelHandle()) {
+ delete mStreams[i];
+ }
+ }
+ }
+ mStreams.clear();
+ m_camOps->delete_channel(m_camHandle, m_handle);
+ m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : deleteChannel
+ *
+ * DESCRIPTION: deletes a camera channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraChannel::deleteChannel()
+{
+ if (m_bIsActive) {
+ stop();
+ }
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) && (m_handle == mStreams[i]->getChannelHandle())) {
+ mStreams[i]->deleteStream();
+ }
+ }
+ m_camOps->delete_channel(m_camHandle, m_handle);
+}
+
+/*===========================================================================
+ * FUNCTION : setStreamSyncCB
+ *
+ * DESCRIPTION: register a callback function with the stream of the given stream type
+ *
+ * PARAMETERS :
+ * @stream_type : Stream type for which callback needs to be registered.
+ * @stream_cb : Callback function
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::setStreamSyncCB (cam_stream_type_t stream_type,
+ stream_cb_routine stream_cb)
+{
+ int32_t rc = UNKNOWN_ERROR;
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) && (stream_type == mStreams[i]->getMyType())) {
+ rc = mStreams[i]->setSyncDataCB(stream_cb);
+ break;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : init
+ *
+ * DESCRIPTION: initialization of channel
+ *
+ * PARAMETERS :
+ * @attr : channel bundle attribute setting
+ * @dataCB : data notify callback
+ * @userData: user data ptr
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::init(mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t dataCB,
+ void *userData)
+{
+ m_handle = m_camOps->add_channel(m_camHandle,
+ attr,
+ dataCB,
+ userData);
+ if (m_handle == 0) {
+ LOGE("Add channel failed");
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
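+
+// Editor's note: the handle returned by add_channel() is the mm-camera channel id;
+// every stream subsequently added through addStream() is tagged with this handle,
+// which is how owned streams are told apart from linked ones in stop() and in the
+// destructor.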
+
+/*===========================================================================
+ * FUNCTION : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ * @allocator : stream related buffer allocator
+ * @streamInfoBuf : ptr to buf that contains stream info
+ * @miscBuf : ptr to buf that contains misc buffers
+ * @minStreamBufNum: number of stream buffers needed
+ * @paddingInfo : padding information
+ * @stream_cb : stream data notify callback
+ * @userdata : user data ptr
+ * @bDynAllocBuf : flag indicating whether buffers may be allocated in two steps
+ * @online_rotation: rotation applied online
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::addStream(QCameraAllocator &allocator,
+ QCameraHeapMemory *streamInfoBuf, QCameraHeapMemory *miscBuf,
+ uint8_t minStreamBufNum, cam_padding_info_t *paddingInfo,
+ stream_cb_routine stream_cb, void *userdata, bool bDynAllocBuf,
+ bool bDeffAlloc, cam_rotation_t online_rotation)
+{
+ int32_t rc = NO_ERROR;
+ if (mStreams.size() >= MAX_STREAM_NUM_IN_BUNDLE) {
+ LOGE("stream number (%zu) exceeds max limit (%d)",
+ mStreams.size(), MAX_STREAM_NUM_IN_BUNDLE);
+ if (streamInfoBuf != NULL) {
+ streamInfoBuf->deallocate();
+ delete streamInfoBuf;
+ streamInfoBuf = NULL;
+ }
+ return BAD_VALUE;
+ }
+ QCameraStream *pStream = new QCameraStream(allocator,
+ m_camHandle, m_handle, m_camOps, paddingInfo, bDeffAlloc,
+ online_rotation);
+ if (pStream == NULL) {
+ LOGE("No mem for Stream");
+ if (streamInfoBuf != NULL) {
+ streamInfoBuf->deallocate();
+ delete streamInfoBuf;
+ streamInfoBuf = NULL;
+ }
+ return NO_MEMORY;
+ }
+
+ rc = pStream->init(streamInfoBuf, miscBuf, minStreamBufNum,
+ stream_cb, userdata, bDynAllocBuf);
+ if (rc == 0) {
+ mStreams.add(pStream);
+ } else {
+ delete pStream;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : linkStream
+ *
+ * DESCRIPTION: link a stream into channel
+ *
+ * PARAMETERS :
+ * @ch : Channel which the stream belongs to
+ * @stream : Stream which needs to be linked
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::linkStream(QCameraChannel *ch, QCameraStream *stream)
+{
+ int32_t rc = NO_ERROR;
+
+ if ((0 == m_handle) || (NULL == ch) || (NULL == stream)) {
+ return NO_INIT;
+ }
+
+ int32_t handle = m_camOps->link_stream(m_camHandle,
+ ch->getMyHandle(),
+ stream->getMyHandle(),
+ m_handle);
+ if (0 == handle) {
+ LOGE("Linking of stream failed");
+ rc = INVALID_OPERATION;
+ } else {
+ mStreams.add(stream);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belonging to this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::start()
+{
+ int32_t rc = NO_ERROR;
+
+ if(m_bIsActive) {
+ LOGW("Attempt to start active channel");
+ return rc;
+ }
+ if (mStreams.size() > 1) {
+ // there is more than one stream in the channel
+ // we need to notify mctl that all streams in this channel need to be bundled
+ cam_bundle_config_t bundleInfo;
+ memset(&bundleInfo, 0, sizeof(bundleInfo));
+ rc = m_camOps->get_bundle_info(m_camHandle, m_handle, &bundleInfo);
+ if (rc != NO_ERROR) {
+ LOGE("get_bundle_info failed");
+ return rc;
+ }
+ if (bundleInfo.num_of_streams > 1) {
+ for (int i = 0; i < bundleInfo.num_of_streams; i++) {
+ QCameraStream *pStream = getStreamByServerID(bundleInfo.stream_ids[i]);
+ if (pStream != NULL) {
+ if ((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC))) {
+ // Skip metadata for reprocess now because the PP module cannot handle metadata
+ // May need further discussion if Imaginglib needs metadata
+ continue;
+ }
+
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
+ param.bundleInfo = bundleInfo;
+ rc = pStream->setParameter(param);
+ if (rc != NO_ERROR) {
+ LOGE("stream setParameter for set bundle failed");
+ return rc;
+ }
+ }
+ }
+ }
+ }
+
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle == mStreams[i]->getChannelHandle())) {
+ mStreams[i]->start();
+ }
+ }
+ rc = m_camOps->start_channel(m_camHandle, m_handle);
+
+ if (rc != NO_ERROR) {
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle == mStreams[i]->getChannelHandle())) {
+ mStreams[i]->stop();
+ }
+ }
+ } else {
+ m_bIsActive = true;
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL) {
+ mStreams[i]->cond_signal();
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::stop()
+{
+ int32_t rc = NO_ERROR;
+ size_t i = 0;
+
+ if (!m_bIsActive) {
+ return NO_INIT;
+ }
+
+ while (i < mStreams.size()) {
+ if (mStreams[i] == NULL) {
+ // Defensive: skip unexpected NULL entries so the loop cannot stall
+ i++;
+ continue;
+ }
+ if (m_handle == mStreams[i]->getChannelHandle()) {
+ mStreams[i]->stop();
+ i++;
+ } else {
+ // Remove linked stream from stream list
+ mStreams.removeAt(i);
+ }
+ }
+
+ rc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+ m_bIsActive = false;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ * @recvd_frame : stream buf frame to be returned
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+ int32_t rc = NO_ERROR;
+ for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+ if (recvd_frame->bufs[i] != NULL) {
+ for (size_t j = 0; j < mStreams.size(); j++) {
+ if (mStreams[j] != NULL &&
+ mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
+ rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+ break; // break loop j
+ }
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : bufDone
+ *
+ * DESCRIPTION: return specified buffer from super buffer to kernel
+ *
+ * PARAMETERS :
+ * @recvd_frame : stream buf frame to be returned
+ * @stream_id : stream ID of the buffer to be released
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame, uint32_t stream_id)
+{
+ int32_t rc = NO_ERROR;
+ int32_t index;
+ for (int32_t i = 0; i < (int32_t)recvd_frame->num_bufs; i++) {
+ index = -1;
+ if ((recvd_frame->bufs[i] != NULL) &&
+ (recvd_frame->bufs[i]->stream_id == stream_id)) {
+ for (size_t j = 0; j < mStreams.size(); j++) {
+ if ((mStreams[j] != NULL) &&
+ (mStreams[j]->getMyHandle() == stream_id)) {
+ rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+ index = i;
+ break; // break loop j
+ }
+ }
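+ // Editor's note: once the matching buffer has been returned, compact the super
+ // buffer by shifting the remaining entries down and shrinking num_bufs, so the
+ // caller does not see (or double-release) the already-returned buffer.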
+ if ((index >= 0) && (index < (int32_t)recvd_frame->num_bufs)) {
+ for (int32_t j = index; j < (int32_t)(recvd_frame->num_bufs - 1); j++) {
+ recvd_frame->bufs[j] = recvd_frame->bufs[j + 1];
+ }
+ recvd_frame->num_bufs--;
+ i--;
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ * @previewWindow : ptr to preview window ops table, needed to set preview
+ * crop information
+ * @crop_info : crop info as a result of zoom operation
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::processZoomDone(preview_stream_ops_t *previewWindow,
+ cam_crop_data_t &crop_info)
+{
+ int32_t rc = NO_ERROR;
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle == mStreams[i]->getChannelHandle())) {
+ rc = mStreams[i]->processZoomDone(previewWindow, crop_info);
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ * @streamHandle : stream handle
+ *
+ * RETURN : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByHandle(uint32_t streamHandle)
+{
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
+ return mStreams[i];
+ }
+ }
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamByServerID
+ *
+ * DESCRIPTION: return stream object by stream server ID from daemon
+ *
+ * PARAMETERS :
+ * @serverID : stream server ID
+ *
+ * RETURN : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByServerID(uint32_t serverID)
+{
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL && mStreams[i]->getMyServerID() == serverID) {
+ return mStreams[i];
+ }
+ }
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index of streams in the channel
+ *
+ * PARAMETERS :
+ * @index : index of stream in the channel
+ *
+ * RETURN : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByIndex(uint32_t index)
+{
+ if (index >= MAX_STREAM_NUM_IN_BUNDLE) {
+ return NULL;
+ }
+
+ if (index < mStreams.size()) {
+ return mStreams[index];
+ }
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : UpdateStreamBasedParameters
+ *
+ * DESCRIPTION: update any stream based settings from parameters
+ *
+ * PARAMETERS :
+ * @param : reference to parameters object
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::UpdateStreamBasedParameters(QCameraParametersIntf &param)
+{
+ int32_t rc = NO_ERROR;
+ if (param.isPreviewFlipChanged()) {
+ // try to find preview stream
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle == mStreams[i]->getChannelHandle()) &&
+ (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ (mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW))) ) {
+ cam_stream_parm_buffer_t param_buf;
+ memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+ param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+ param_buf.flipInfo.flip_mask =
+ (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_PREVIEW);
+ rc = mStreams[i]->setParameter(param_buf);
+ if (rc != NO_ERROR) {
+ LOGW("set preview stream flip failed");
+ }
+ }
+ }
+ }
+ if (param.isVideoFlipChanged()) {
+ // try to find video stream
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle == mStreams[i]->getChannelHandle()) &&
+ (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+ (mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO))) ) {
+ cam_stream_parm_buffer_t param_buf;
+ memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+ param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+ param_buf.flipInfo.flip_mask =
+ (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_VIDEO);
+ rc = mStreams[i]->setParameter(param_buf);
+ if (rc != NO_ERROR) {
+ LOGW("set video stream flip failed");
+ }
+ }
+ }
+ }
+ if (param.isSnapshotFlipChanged()) {
+ // try to find snapshot/postview stream
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL &&
+ (m_handle == mStreams[i]->getChannelHandle()) &&
+ (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ mStreams[i]->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ) ) {
+ cam_stream_parm_buffer_t param_buf;
+ memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+ param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+ param_buf.flipInfo.flip_mask =
+ (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+ rc = mStreams[i]->setParameter(param_buf);
+ if (rc != NO_ERROR) {
+ LOGW("set snapshot stream flip failed");
+ }
+ }
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraPicChannel
+ *
+ * DESCRIPTION: constructor of QCameraPicChannel
+ *
+ * PARAMETERS :
+ * @cam_handle : camera handle
+ * @cam_ops : ptr to camera ops table
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops) :
+ QCameraChannel(cam_handle, cam_ops)
+{
+ m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraPicChannel
+ *
+ * DESCRIPTION: default constructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel()
+{
+ m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraPicChannel
+ *
+ * DESCRIPTION: destructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraPicChannel::~QCameraPicChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ * @buf : request buf info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::takePicture (mm_camera_req_buf_t *buf)
+{
+ int32_t rc = m_camOps->request_super_buf(m_camHandle, m_handle, buf);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::cancelPicture()
+{
+ int32_t rc = m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stopAdvancedCapture
+ *
+ * DESCRIPTION: stop advanced capture based on advanced capture type.
+ *
+ * PARAMETERS :
+ * @type : advanced capture type.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::stopAdvancedCapture(mm_camera_advanced_capture_t type)
+{
+ int32_t rc = m_camOps->process_advanced_capture(m_camHandle,
+ m_handle, type, 0, NULL);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : startAdvancedCapture
+ *
+ * DESCRIPTION: start advanced capture based on advanced capture type.
+ *
+ * PARAMETERS :
+ * @type : advanced capture type.
+ * @config : advanced capture config
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::startAdvancedCapture(mm_camera_advanced_capture_t type,
+ cam_capture_frame_config_t *config)
+{
+ int32_t rc = NO_ERROR;
+
+ rc = m_camOps->process_advanced_capture(m_camHandle, m_handle, type,
+ 1, config);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : flushSuperbuffer
+ *
+ * DESCRIPTION: flush all queued superbuffer frames.
+ *
+ * PARAMETERS :
+ * @frame_idx : frame index of focused frame
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::flushSuperbuffer(uint32_t frame_idx)
+{
+ int32_t rc = m_camOps->flush_super_buf_queue(m_camHandle, m_handle, frame_idx);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraVideoChannel
+ *
+ * DESCRIPTION: constructor of QCameraVideoChannel
+ *
+ * PARAMETERS :
+ * @cam_handle : camera handle
+ * @cam_ops : ptr to camera ops table
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops) :
+ QCameraChannel(cam_handle, cam_ops)
+{
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraVideoChannel
+ *
+ * DESCRIPTION: default constructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraVideoChannel
+ *
+ * DESCRIPTION: destructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraVideoChannel::~QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ * @buf : request buf info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::takePicture(mm_camera_req_buf_t *buf)
+{
+ int32_t rc = m_camOps->request_super_buf(m_camHandle, m_handle, buf);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::cancelPicture()
+{
+ int32_t rc = m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseFrame
+ *
+ * DESCRIPTION: return video frame from app
+ *
+ * PARAMETERS :
+ * @opaque : ptr to video frame to be returned
+ * @isMetaData : if frame is a metadata or real frame
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::releaseFrame(const void * opaque, bool isMetaData)
+{
+ QCameraStream *pVideoStream = NULL;
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mStreams[i] != NULL && mStreams[i]->isTypeOf(CAM_STREAM_TYPE_VIDEO)) {
+ pVideoStream = mStreams[i];
+ break;
+ }
+ }
+
+ if (NULL == pVideoStream) {
+ LOGE("No video stream in the channel");
+ return BAD_VALUE;
+ }
+
+ int32_t rc = pVideoStream->bufDone(opaque, isMetaData);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS :
+ * @cam_handle : camera handle
+ * @cam_ops : ptr to camera ops table
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops) :
+ QCameraChannel(cam_handle, cam_ops),
+ m_pSrcChannel(NULL),
+ mPassCount(0)
+{
+ memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraReprocessChannel
+ *
+ * DESCRIPTION: default constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel() :
+ m_pSrcChannel(NULL),
+ mPassCount(0)
+{
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraReprocessChannel::~QCameraReprocessChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ * @allocator : stream related buffer allocator
+ * @featureConfig : pp feature configuration
+ * @pSrcChannel : ptr to input source channel that needs reprocess
+ * @minStreamBufNum: number of stream buffers needed
+ * @burstNum : number of burst captures needed
+ * @paddingInfo : padding information
+ * @param : reference to parameters
+ * @contStream : continuous streaming mode or burst
+ * @offline : configure for offline reprocessing
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::addReprocStreamsFromSource(
+ QCameraAllocator& allocator, cam_pp_feature_config_t &featureConfig,
+ QCameraChannel *pSrcChannel, uint8_t minStreamBufNum, uint8_t burstNum,
+ cam_padding_info_t *paddingInfo, QCameraParametersIntf &param, bool contStream,
+ bool offline)
+{
+ int32_t rc = 0;
+ QCameraStream *pStream = NULL;
+ QCameraHeapMemory *pStreamInfoBuf = NULL;
+ QCameraHeapMemory *pMiscBuf = NULL;
+ cam_stream_info_t *streamInfo = NULL;
+ cam_padding_info_t padding;
+
+ memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+ if (NULL == paddingInfo) {
+ return BAD_VALUE;
+ }
+ padding = *paddingInfo;
+ //Use maximum padding so that the buffer
+ //can be rotated
+ padding.width_padding = MAX(padding.width_padding, padding.height_padding);
+ padding.height_padding = padding.width_padding;
+ padding.offset_info.offset_x = 0;
+ padding.offset_info.offset_y = 0;
+
+ LOGD("num of src stream = %d", pSrcChannel->getNumOfStreams());
+
+ for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); i++) {
+ cam_pp_feature_config_t pp_featuremask = featureConfig;
+ pStream = pSrcChannel->getStreamByIndex(i);
+ if (pStream != NULL) {
+ if (param.getofflineRAW() && !((pStream->isTypeOf(CAM_STREAM_TYPE_RAW))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+ || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)))) {
+ // Skip all streams other than RAW and POSTVIEW in case of offline RAW processing
+ continue;
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_RAW)
+ && (!param.getofflineRAW())) {
+ // Skip raw for reprocess now because the PP module cannot handle
+ // metadata & raw. May need further discussion if Imaginglib needs metadata
+ continue;
+ }
+
+ if (((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+ && !(param.getManualCaptureMode() >=
+ CAM_MANUAL_CAPTURE_TYPE_3))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+ // Skip metadata
+ continue;
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+ cam_feature_mask_t feature_mask = featureConfig.feature_mask;
+
+ // skip thumbnail reprocessing if not needed
+ if (!param.needThumbnailReprocess(&feature_mask)) {
+ continue;
+ }
+ // CAC, SHARPNESS, FLIP and WNR would have been already applied -
+ // on preview/postview stream in realtime.
+ // So, need not apply again.
+ feature_mask &= ~(CAM_QCOM_FEATURE_DENOISE2D |
+ CAM_QCOM_FEATURE_CAC |
+ CAM_QCOM_FEATURE_SHARPNESS |
+ CAM_QCOM_FEATURE_FLIP |
+ CAM_QCOM_FEATURE_RAW_PROCESSING);
+ if (!feature_mask) {
+ // Skip thumbnail stream reprocessing since no other
+ // reprocessing is enabled.
+ continue;
+ }
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ pp_featuremask.feature_mask = 0;
+ pp_featuremask.feature_mask |= CAM_QCOM_FEATURE_METADATA_PROCESSING;
+ }
+
+ pStreamInfoBuf = allocator.allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+ if (pStreamInfoBuf == NULL) {
+ LOGE("no mem for stream info buf");
+ rc = NO_MEMORY;
+ break;
+ }
+
+ streamInfo = (cam_stream_info_t *)pStreamInfoBuf->getPtr(0);
+ memset(streamInfo, 0, sizeof(cam_stream_info_t));
+ streamInfo->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+ // Enable CPP high performance mode to put it in turbo frequency mode for
+ // burst/longshot/HDR snapshot cases
+ streamInfo->perf_mode = CAM_PERF_HIGH_PERFORMANCE;
+ if (param.getofflineRAW() && (pStream->isTypeOf(CAM_STREAM_TYPE_RAW)
+ || pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))) {
+ if (pp_featuremask.feature_mask & CAM_QCOM_FEATURE_QUADRA_CFA) {
+ param.getStreamFormat(CAM_STREAM_TYPE_OFFLINE_PROC, streamInfo->fmt);
+ } else {
+ streamInfo->fmt = CAM_FORMAT_YUV_420_NV21;
+ }
+ } else {
+ rc = pStream->getFormat(streamInfo->fmt);
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+ param.getThumbnailSize(&(streamInfo->dim.width), &(streamInfo->dim.height));
+ } else {
+ if ((param.isPostProcScaling()) &&
+ (pp_featuremask.feature_mask & CAM_QCOM_FEATURE_SCALE)) {
+ rc = param.getStreamDimension(CAM_STREAM_TYPE_OFFLINE_PROC,
+ streamInfo->dim);
+ } else if ((param.getofflineRAW()) &&
+ ((pStream->isTypeOf(CAM_STREAM_TYPE_RAW)) ||
+ (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)))) {
+ if ((param.getQuadraCfa()) &&
+ (pp_featuremask.feature_mask & CAM_QCOM_FEATURE_QUADRA_CFA)) {
+ rc = pStream->getFrameDimension(streamInfo->dim);
+ } else {
+ param.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,streamInfo->dim);
+ }
+ } else {
+ rc = pStream->getFrameDimension(streamInfo->dim);
+ }
+ }
+
+ if ( contStream ) {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ streamInfo->num_of_burst = 0;
+ } else {
+ streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+ streamInfo->num_of_burst = burstNum;
+ }
+ streamInfo->num_bufs = minStreamBufNum;
+
+ cam_stream_reproc_config_t rp_cfg;
+ memset(&rp_cfg, 0, sizeof(cam_stream_reproc_config_t));
+ if (offline) {
+ cam_frame_len_offset_t offset;
+ memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+
+ rp_cfg.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+ pStream->getFormat(rp_cfg.offline.input_fmt);
+ pStream->getFrameDimension(rp_cfg.offline.input_dim);
+ pStream->getFrameOffset(offset);
+ rp_cfg.offline.input_buf_planes.plane_info = offset;
+ rp_cfg.offline.input_type = pStream->getMyOriginalType();
+ //For input metadata + input buffer
+ rp_cfg.offline.num_of_bufs = 2;
+ } else {
+ rp_cfg.pp_type = CAM_ONLINE_REPROCESS_TYPE;
+ rp_cfg.online.input_stream_id = pStream->getMyServerID();
+ rp_cfg.online.input_stream_type = pStream->getMyOriginalType();
+ }
+ param.getStreamRotation(streamInfo->stream_type,
+ streamInfo->pp_config, streamInfo->dim);
+ streamInfo->reprocess_config = rp_cfg;
+ streamInfo->reprocess_config.pp_feature_config = pp_featuremask;
+
+ if (!(pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)
+ || pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)
+ || pStream->isTypeOf(CAM_STREAM_TYPE_RAW)
+ || pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))) {
+ // CAC, SHARPNESS, FLIP and WNR would have been already applied -
+ // on preview/postview stream in realtime. Need not apply again.
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_CAC;
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_SHARPNESS;
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_FLIP;
+ //Don't do WNR for thumbnail
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_DENOISE2D;
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_CDS;
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_DSDN;
+ // RAW processing is not needed for non-RAW streams
+ streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+ ~CAM_QCOM_FEATURE_RAW_PROCESSING;
+
+ if (param.isHDREnabled()
+ && !param.isHDRThumbnailProcessNeeded()){
+ streamInfo->reprocess_config.pp_feature_config.feature_mask
+ &= ~CAM_QCOM_FEATURE_HDR;
+ }
+ }
+
+ cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+ if (offline) {
+ type = streamInfo->reprocess_config.offline.input_type;
+ } else {
+ type = streamInfo->reprocess_config.online.input_stream_type;
+ }
+ if (type == CAM_STREAM_TYPE_SNAPSHOT) {
+ int flipMode = param.getFlipMode(type);
+ if (flipMode > 0) {
+ streamInfo->reprocess_config.pp_feature_config.feature_mask |=
+ CAM_QCOM_FEATURE_FLIP;
+ streamInfo->reprocess_config.pp_feature_config.flip = (uint32_t)flipMode;
+ }
+ }
+
+ if ((streamInfo->reprocess_config.pp_feature_config.feature_mask
+ & CAM_QCOM_FEATURE_SCALE)
+ && param.isReprocScaleEnabled()
+ && param.isUnderReprocScaling()) {
+ //we only Scale Snapshot frame
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ streamInfo->dim.width =
+ streamInfo->reprocess_config.pp_feature_config.scale_param.output_width;
+ streamInfo->dim.height =
+ streamInfo->reprocess_config.pp_feature_config.scale_param.output_height;
+ }
+ LOGH("stream width=%d, height=%d.",
+ streamInfo->dim.width, streamInfo->dim.height);
+ }
+
+ // save source stream handler
+ mSrcStreamHandles[mStreams.size()] = pStream->getMyHandle();
+
+ pMiscBuf = allocator.allocateMiscBuf(streamInfo);
+
+ LOGH("Configure Reprocessing: stream = %d, res = %dX%d, fmt = %d, type = %d",
+ pStream->getMyOriginalType(), streamInfo->dim.width,
+ streamInfo->dim.height, streamInfo->fmt, type);
+
+ // add reprocess stream
+ if (streamInfo->reprocess_config.pp_feature_config.feature_mask
+ & CAM_QCOM_FEATURE_ROTATION) {
+ rc = addStream(allocator, pStreamInfoBuf, pMiscBuf,
+ minStreamBufNum, &padding, NULL, NULL, false, false,
+ streamInfo->reprocess_config.pp_feature_config.rotation);
+ } else {
+ rc = addStream(allocator, pStreamInfoBuf, pMiscBuf,
+ minStreamBufNum, &padding, NULL, NULL, false, false);
+ }
+ if (rc != NO_ERROR) {
+ LOGE("add reprocess stream failed, ret = %d", rc);
+ break;
+ }
+ }
+ }
+
+ if (rc == NO_ERROR) {
+ m_pSrcChannel = pSrcChannel;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamBySrouceHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ * @srcHandle : source stream handle
+ *
+ * RETURN : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCameraStream * QCameraReprocessChannel::getStreamBySrouceHandle(uint32_t srcHandle)
+{
+ QCameraStream *pStream = NULL;
+
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if (mSrcStreamHandles[i] == srcHandle) {
+ pStream = mStreams[i];
+ break;
+ }
+ }
+
+ return pStream;
+}
+
+/*===========================================================================
+ * FUNCTION : stop
+ *
+ * DESCRIPTION: stop channel and unmap offline buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::stop()
+{
+ int32_t rc = QCameraChannel::stop();
+
+ if (!mOfflineBuffers.empty()) {
+ QCameraStream *stream = NULL;
+ List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+ int error = NO_ERROR;
+ for( ; it != mOfflineBuffers.end(); it++) {
+ stream = (*it).stream;
+ if (NULL != stream) {
+ error = stream->unmapBuf((*it).type,
+ (*it).index,
+ -1);
+ if (NO_ERROR != error) {
+ LOGE("Error during offline buffer unmap %d",
+ error);
+ }
+ }
+ }
+ mOfflineBuffers.clear();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : doReprocessOffline
+ *
+ * DESCRIPTION: request to do offline reprocess on the frame
+ *
+ * PARAMETERS :
+ * @frame : frame on which reprocess is to be performed
+ * @meta_buf : Metadata buffer for reprocessing
+ * @pStream : Actual reprocess stream
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocessOffline(mm_camera_buf_def_t *frame,
+ mm_camera_buf_def_t *meta_buf, QCameraStream *pStream)
+{
+ int32_t rc = 0;
+ OfflineBuffer mappedBuffer;
+ uint32_t buf_index = 0;
+ uint32_t meta_buf_index = 0;
+
+ if ((frame == NULL) || (meta_buf == NULL)) {
+ LOGE("Invalid Input Paramters");
+ return INVALID_OPERATION;
+ }
+
+ if (pStream == NULL) {
+ pStream = getStreamBySrouceHandle(frame->stream_id);
+ if (pStream == NULL) {
+ LOGE("Input validation failed.");
+ return INVALID_OPERATION;
+ }
+ }
+
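+ // Editor's note: choose a mapping index one past the highest index already used
+ // by previously mapped offline buffers, so repeated offline reprocess requests
+ // do not collide on the CAM_MAPPING_BUF_TYPE_OFFLINE_* buffer indices.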
+ if (!mOfflineBuffers.empty()) {
+ List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+ for( ; it != mOfflineBuffers.end(); it++) {
+ buf_index = (buf_index < ((*it).index)) ? ((*it).index) : buf_index;
+ }
+ buf_index += 1;
+ }
+
+ meta_buf_index = buf_index;
+ if (meta_buf != NULL) {
+ rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
+ meta_buf_index,
+ -1,
+ meta_buf->fd,
+ meta_buf->frame_len);
+ if (NO_ERROR != rc ) {
+ LOGE("Error during metadata buffer mapping");
+ rc = -1;
+ return rc;
+ }
+
+ mappedBuffer.index = meta_buf_index;
+ mappedBuffer.stream = pStream;
+ mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
+ mOfflineBuffers.push_back(mappedBuffer);
+ buf_index += 1;
+ }
+
+ rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+ buf_index,
+ -1,
+ frame->fd,
+ frame->frame_len);
+ if (NO_ERROR != rc ) {
+ LOGE("Error during reprocess input buffer mapping");
+ rc = -1;
+ return rc;
+ }
+ mappedBuffer.index = buf_index;
+ mappedBuffer.stream = pStream;
+ mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+ mOfflineBuffers.push_back(mappedBuffer);
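+ // Editor's note: both the metadata and input mappings recorded above are
+ // unmapped later in QCameraReprocessChannel::stop().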
+
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+ param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+ param.reprocess.buf_index = buf_index;
+ param.reprocess.frame_idx = frame->frame_idx;
+
+ if (meta_buf != NULL) {
+ param.reprocess.meta_present = 1;
+ param.reprocess.meta_buf_index = meta_buf_index;
+ }
+
+ LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d type = %d",
+ param.reprocess.frame_idx, param.reprocess.buf_index,
+ param.reprocess.meta_buf_index, pStream->getMyOriginalType());
+
+ rc = pStream->setParameter(param);
+ if (rc != NO_ERROR) {
+ LOGE("stream setParameter for reprocess failed");
+ return rc;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : doReprocessOffline
+ *
+ * DESCRIPTION: request to do offline reprocess on the frame
+ *
+ * PARAMETERS :
+ * @frame : frame on which reprocess is to be performed
+ * @meta_buf : Metadata buffer for reprocessing
+ * @mParameter : camera parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
+ mm_camera_buf_def_t *meta_buf, QCameraParametersIntf &mParameter)
+{
+ int32_t rc = 0;
+ QCameraStream *pStream = NULL;
+
+ if (mStreams.size() < 1) {
+ LOGE("No reprocess streams");
+ return -1;
+ }
+ if (m_pSrcChannel == NULL) {
+ LOGE("No source channel for reprocess");
+ return -1;
+ }
+
+ if (frame == NULL) {
+ LOGE("Invalid source frame");
+ return BAD_VALUE;
+ }
+
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+ if ((pStream != NULL) &&
+ (m_handle == pStream->getChannelHandle())) {
+ if (mParameter.getofflineRAW() &&
+ !((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))
+ || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)))) {
+ continue;
+ }
+
+ if ((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)
+ && (mParameter.getManualCaptureMode()
+ < CAM_MANUAL_CAPTURE_TYPE_3))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+ // Skip metadata for reprocess now because the PP module cannot handle metadata
+ // May need further discussion if Imaginglib needs metadata
+ continue;
+ }
+
+ // Update Metadata
+ if (meta_buf != NULL) {
+ uint32_t stream_id = frame->bufs[i]->stream_id;
+ QCameraStream *srcStream =
+ m_pSrcChannel->getStreamByHandle(stream_id);
+ metadata_buffer_t *pMetaData =
+ (metadata_buffer_t *)meta_buf->buffer;
+ if ((NULL != pMetaData) && (NULL != srcStream)) {
+ IF_META_AVAILABLE(cam_crop_data_t, crop,
+ CAM_INTF_META_CROP_DATA, pMetaData) {
+ if (MAX_NUM_STREAMS > crop->num_of_streams) {
+ for (int j = 0; j < MAX_NUM_STREAMS; j++) {
+ if (crop->crop_info[j].stream_id ==
+ srcStream->getMyServerID()) {
+ // Store crop/roi information for offline reprocess
+ // in the reprocess stream slot
+ crop->crop_info[crop->num_of_streams].crop =
+ crop->crop_info[j].crop;
+ crop->crop_info[crop->num_of_streams].roi_map =
+ crop->crop_info[j].roi_map;
+ for (uint8_t k = 0; k < mStreams.size(); k++) {
+ if (srcStream->getMyType() ==
+ mStreams[k]->getMyOriginalType()) {
+ crop->crop_info[crop->num_of_streams].stream_id =
+ mStreams[k]->getMyServerID();
+ break;
+ }
+ }
+ crop->num_of_streams++;
+ break;
+ }
+ }
+ } else {
+ LOGE("No space to add reprocess stream crop/roi information");
+ }
+ }
+ }
+ }
+
+ rc = doReprocessOffline (frame->bufs[i], meta_buf, pStream);
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ * @frame : frame on which reprocess is to be performed
+ * @mParameter : camera parameters
+ * @pMetaStream: Metadata stream handle
+ * @meta_buf_index : Metadata buffer index
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
+ QCameraParametersIntf &mParameter, QCameraStream *pMetaStream,
+ uint8_t meta_buf_index)
+{
+ int32_t rc = 0;
+ if (mStreams.size() < 1) {
+ LOGE("No reprocess streams");
+ return -1;
+ }
+ if (m_pSrcChannel == NULL) {
+ LOGE("No source channel for reprocess");
+ return -1;
+ }
+
+ if (pMetaStream == NULL) {
+ LOGW("Null Metadata buffer for processing");
+ }
+
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ QCameraStream *pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+ if ((pStream != NULL) && (m_handle == pStream->getChannelHandle())) {
+ if (mParameter.getofflineRAW() && !((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))
+ || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))
+ || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)))) {
+ // Skip all streams other than RAW and POSTVIEW in case of offline RAW processing
+ continue;
+ }
+ if ((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)
+ && (mParameter.getManualCaptureMode()
+ < CAM_MANUAL_CAPTURE_TYPE_3))
+ || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+ // Skip metadata for reprocess now because the PP module cannot handle metadata
+ // May need further discussion if Imaginglib needs metadata
+ continue;
+ }
+
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+ param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+ param.reprocess.frame_idx = frame->bufs[i]->frame_idx;
+ if (pMetaStream != NULL) {
+ // we have meta data frame bundled, sent together with reprocess frame
+ param.reprocess.meta_present = 1;
+ param.reprocess.meta_stream_handle = pMetaStream->getMyServerID();
+ param.reprocess.meta_buf_index = meta_buf_index;
+ }
+
+ LOGI("Online reprocessing id = %d buf Id = %d meta index = %d type = %d",
+ param.reprocess.frame_idx, param.reprocess.buf_index,
+ param.reprocess.meta_buf_index, pStream->getMyOriginalType());
+
+ rc = pStream->setParameter(param);
+ if (rc != NO_ERROR) {
+ LOGE("stream setParameter for reprocess failed");
+ break;
+ }
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ * @buf_fd : fd to the input buffer that needs reprocess
+ * @buf_length : length of the input buffer
+ * @ret_val : result of reprocess.
+ * Example: Could be faceID in case of register face image.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(int buf_fd,
+ size_t buf_length, int32_t &ret_val)
+{
+ int32_t rc = 0;
+ if (mStreams.size() < 1) {
+ LOGE("No reprocess streams");
+ return -1;
+ }
+
+ uint32_t buf_idx = 0;
+ for (size_t i = 0; i < mStreams.size(); i++) {
+ if ((mStreams[i] != NULL) &&
+ (m_handle != mStreams[i]->getChannelHandle())) {
+ continue;
+ }
+ rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+ buf_idx, -1,
+ buf_fd, buf_length);
+
+ if (rc == NO_ERROR) {
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+ param.reprocess.buf_index = buf_idx;
+ rc = mStreams[i]->setParameter(param);
+ if (rc == NO_ERROR) {
+ ret_val = param.reprocess.ret_val;
+ }
+ mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+ buf_idx, -1);
+ }
+ }
+ return rc;
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraChannel.h b/camera/QCamera2/HAL/QCameraChannel.h
new file mode 100644
index 0000000..1db634d
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraChannel.h
@@ -0,0 +1,171 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CHANNEL_H__
+#define __QCAMERA_CHANNEL_H__
+
+#include "camera.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraStream.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraChannel
+{
+public:
+ QCameraChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops);
+ QCameraChannel();
+ virtual ~QCameraChannel();
+ virtual int32_t init(mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t dataCB, // data CB for channel data
+ void *userData);
+ // Ownership of streamInfoBuf is transferred from the caller to the channel with this call.
+ virtual int32_t addStream(QCameraAllocator& allocator,
+ QCameraHeapMemory *streamInfoBuf, QCameraHeapMemory *miscBuf,
+ uint8_t minStreamBufnum, cam_padding_info_t *paddingInfo,
+ stream_cb_routine stream_cb, void *userdata, bool bDynAllocBuf,
+ bool bDeffAlloc = false, cam_rotation_t online_rotation = ROTATE_0);
+ virtual int32_t linkStream(QCameraChannel *ch, QCameraStream *stream);
+ virtual int32_t start();
+ virtual int32_t stop();
+ virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+ virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame, uint32_t stream_id);
+ virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+ cam_crop_data_t &crop_info);
+ QCameraStream *getStreamByHandle(uint32_t streamHandle);
+ uint32_t getMyHandle() const {return m_handle;};
+ uint32_t getNumOfStreams() const {return (uint32_t) mStreams.size();};
+ QCameraStream *getStreamByIndex(uint32_t index);
+ QCameraStream *getStreamByServerID(uint32_t serverID);
+ int32_t UpdateStreamBasedParameters(QCameraParametersIntf &param);
+ void deleteChannel();
+ int32_t setStreamSyncCB (cam_stream_type_t stream_type,
+ stream_cb_routine stream_cb);
+ bool isActive() { return m_bIsActive; }
+protected:
+ uint32_t m_camHandle;
+ mm_camera_ops_t *m_camOps;
+ bool m_bIsActive;
+ bool m_bAllowDynBufAlloc; // if buf allocation can be in two steps
+
+ uint32_t m_handle;
+ Vector<QCameraStream *> mStreams;
+ mm_camera_buf_notify_t mDataCB;
+ void *mUserData;
+};
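+
+// Editor's note: illustrative usage sketch only, not part of the original HAL
+// source; the variable names below are assumptions.
+//
+//   QCameraChannel *ch = new QCameraChannel(cam_handle, cam_ops);
+//   ch->init(&attr, dataNotifyCB, userData);          // create mm-camera channel
+//   ch->addStream(allocator, pStreamInfo, pMiscBuf,   // one call per stream
+//           minBufNum, &padding, streamCB, userData, false);
+//   ch->start();                                      // starts all owned streams
+//   ...
+//   ch->stop();
+//   delete ch;                                        // also deletes the mm-camera channel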
+
+// burst pic channel: i.e. zsl burst mode
+class QCameraPicChannel : public QCameraChannel
+{
+public:
+ QCameraPicChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops);
+ QCameraPicChannel();
+ virtual ~QCameraPicChannel();
+ int32_t takePicture(mm_camera_req_buf_t *buf);
+ int32_t cancelPicture();
+ int32_t stopAdvancedCapture(mm_camera_advanced_capture_t type);
+ int32_t startAdvancedCapture(mm_camera_advanced_capture_t type,
+ cam_capture_frame_config_t *config = NULL);
+ int32_t flushSuperbuffer(uint32_t frame_idx);
+};
+
+// video channel class
+class QCameraVideoChannel : public QCameraChannel
+{
+public:
+ QCameraVideoChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops);
+ QCameraVideoChannel();
+ virtual ~QCameraVideoChannel();
+ int32_t takePicture(mm_camera_req_buf_t *buf);
+ int32_t cancelPicture();
+ int32_t releaseFrame(const void *opaque, bool isMetaData);
+};
+
+// reprocess channel class
+class QCameraReprocessChannel : public QCameraChannel
+{
+public:
+ QCameraReprocessChannel(uint32_t cam_handle,
+ mm_camera_ops_t *cam_ops);
+ QCameraReprocessChannel();
+ virtual ~QCameraReprocessChannel();
+ int32_t addReprocStreamsFromSource(QCameraAllocator& allocator,
+ cam_pp_feature_config_t &config,
+ QCameraChannel *pSrcChannel,
+ uint8_t minStreamBufNum,
+ uint8_t burstNum,
+ cam_padding_info_t *paddingInfo,
+ QCameraParametersIntf &param,
+ bool contStream,
+ bool offline);
+ // online reprocess
+ int32_t doReprocess(mm_camera_super_buf_t *frame,
+ QCameraParametersIntf &param, QCameraStream *pMetaStream,
+ uint8_t meta_buf_index);
+
+ // offline reprocess
+ int32_t doReprocess(int buf_fd, size_t buf_length, int32_t &ret_val);
+
+ int32_t doReprocessOffline(mm_camera_super_buf_t *frame,
+ mm_camera_buf_def_t *meta_buf, QCameraParametersIntf &param);
+
+ int32_t doReprocessOffline(mm_camera_buf_def_t *frame,
+ mm_camera_buf_def_t *meta_buf, QCameraStream *pStream = NULL);
+
+ int32_t stop();
+ QCameraChannel *getSrcChannel(){return m_pSrcChannel;};
+ int8_t getReprocCount(){return mPassCount;};
+ void setReprocCount(int8_t count) {mPassCount = count;};
+
+private:
+ QCameraStream *getStreamBySrouceHandle(uint32_t srcHandle);
+
+ typedef struct {
+ QCameraStream *stream;
+ cam_mapping_buf_type type;
+ uint32_t index;
+ } OfflineBuffer;
+
+ uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+ QCameraChannel *m_pSrcChannel; // ptr to source channel for reprocess
+ android::List<OfflineBuffer> mOfflineBuffers;
+ int8_t mPassCount;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CHANNEL_H__ */
diff --git a/camera/QCamera2/HAL/QCameraMem.cpp b/camera/QCamera2/HAL/QCameraMem.cpp
new file mode 100755
index 0000000..00f1548
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMem.cpp
@@ -0,0 +1,2448 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#define LOG_TAG "QCameraHWI_Mem"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <utils/Errors.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+#include "gralloc.h"
+#include "gralloc_priv.h"
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraTrace.h"
+
+// Media dependencies
+#include "OMX_QCOMExtns.h"
+#ifdef USE_MEDIA_EXTENSIONS
+#include <media/hardware/HardwareAPI.h>
+typedef struct VideoNativeHandleMetadata media_metadata_buffer;
+#else
+#include "QComOMXMetadata.h"
+typedef struct encoder_media_buffer_type media_metadata_buffer;
+#endif
+
+extern "C" {
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCameraMemory base class
+
+/*===========================================================================
+ * FUNCTION : QCameraMemory
+ *
+ * DESCRIPTION: default constructor of QCameraMemory
+ *
+ * PARAMETERS :
+ * @cached : flag indicates if using cached memory
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraMemory::QCameraMemory(bool cached,
+ QCameraMemoryPool *pool,
+ cam_stream_type_t streamType, QCameraMemType bufType)
+ :m_bCached(cached),
+ mMemoryPool(pool),
+ mStreamType(streamType),
+ mBufType(bufType)
+{
+ mBufferCount = 0;
+ reset();
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraMemory
+ *
+ * DESCRIPTION: destructor of QCameraMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraMemory::~QCameraMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ * @cmd : cache ops command
+ * @vaddr : ptr to the virtual address
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr)
+{
+ if (!m_bCached) {
+ // Memory is not cached, no need for cache ops
+ LOGD("No cache ops here for uncached memory");
+ return OK;
+ }
+
+ struct ion_flush_data cache_inv_data;
+ struct ion_custom_data custom_data;
+ int ret = OK;
+
+ if (index >= mBufferCount) {
+ LOGE("index %d out of bound [0, %d)", index, mBufferCount);
+ return BAD_INDEX;
+ }
+
+ memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+ memset(&custom_data, 0, sizeof(custom_data));
+ cache_inv_data.vaddr = vaddr;
+ cache_inv_data.fd = mMemInfo[index].fd;
+ cache_inv_data.handle = mMemInfo[index].handle;
+ cache_inv_data.length =
+ ( /* FIXME: Should remove this after ION interface changes */ unsigned int)
+ mMemInfo[index].size;
+ custom_data.cmd = cmd;
+ custom_data.arg = (unsigned long)&cache_inv_data;
+
+ LOGH("addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+ cache_inv_data.vaddr, cache_inv_data.fd,
+ (unsigned long)cache_inv_data.handle, cache_inv_data.length,
+ mMemInfo[index].main_ion_fd);
+ ret = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &custom_data);
+ if (ret < 0) {
+ LOGE("Cache Invalidate failed: %s\n", strerror(errno));
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ *
+ * RETURN : file descriptor
+ *==========================================================================*/
+int QCameraMemory::getFd(uint32_t index) const
+{
+ if (index >= mBufferCount)
+ return BAD_INDEX;
+
+ return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ *
+ * RETURN : buffer size
+ *==========================================================================*/
+ssize_t QCameraMemory::getSize(uint32_t index) const
+{
+ if (index >= mBufferCount)
+ return BAD_INDEX;
+
+ return (ssize_t)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of buffers allocated
+ *==========================================================================*/
+uint8_t QCameraMemory::getCnt() const
+{
+ return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION : reset
+ *
+ * DESCRIPTION: reset member variables
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemory::reset()
+{
+ size_t i, count;
+
+ memset(mMemInfo, 0, sizeof(mMemInfo));
+
+ count = sizeof(mMemInfo) / sizeof(mMemInfo[0]);
+ for (i = 0; i < count; i++) {
+ mMemInfo[i].fd = -1;
+ mMemInfo[i].main_ion_fd = -1;
+ }
+
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : getMappable
+ *
+ * DESCRIPTION: query number of buffers available to map
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of buffers available to map
+ *==========================================================================*/
+uint8_t QCameraMemory::getMappable() const
+{
+ return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION : checkIfAllBuffersMapped
+ *
+ * DESCRIPTION: query if all buffers are mapped
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : 1 as buffer count is always equal to mappable count
+ *==========================================================================*/
+uint8_t QCameraMemory::checkIfAllBuffersMapped() const
+{
+ return 1;
+}
+
+
+/*===========================================================================
+ * FUNCTION : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ * @offset : [input] frame buffer offset
+ * @bufDef : [output] reference to struct to store buffer definition
+ * @index : [input] index of the buffer
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemory::getBufDef(const cam_frame_len_offset_t &offset,
+ mm_camera_buf_def_t &bufDef, uint32_t index) const
+{
+ if (!mBufferCount) {
+ LOGE("Memory not allocated");
+ return;
+ }
+ bufDef.fd = mMemInfo[index].fd;
+ bufDef.frame_len = mMemInfo[index].size;
+ bufDef.buf_type = CAM_STREAM_BUF_TYPE_MPLANE;
+ bufDef.mem_info = (void *)this;
+ bufDef.planes_buf.num_planes = (int8_t)offset.num_planes;
+ bufDef.buffer = getPtr(index);
+ bufDef.buf_idx = index;
+
+ /* Plane 0 needs to be set separately. Set other planes in a loop */
+ bufDef.planes_buf.planes[0].length = offset.mp[0].len;
+ bufDef.planes_buf.planes[0].m.userptr = (long unsigned int)mMemInfo[index].fd;
+ bufDef.planes_buf.planes[0].data_offset = offset.mp[0].offset;
+ bufDef.planes_buf.planes[0].reserved[0] = 0;
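+ /* All planes of buffer 'index' live in the same ION allocation; reserved[0]
+ carries each plane's byte offset within it (plane i starts where plane
+ i-1 ends). */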
+ for (int i = 1; i < bufDef.planes_buf.num_planes; i++) {
+ bufDef.planes_buf.planes[i].length = offset.mp[i].len;
+ bufDef.planes_buf.planes[i].m.userptr = (long unsigned int)mMemInfo[index].fd;
+ bufDef.planes_buf.planes[i].data_offset = offset.mp[i].offset;
+ bufDef.planes_buf.planes[i].reserved[0] =
+ bufDef.planes_buf.planes[i-1].reserved[0] +
+ bufDef.planes_buf.planes[i-1].length;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getUserBufDef
+ *
+ * DESCRIPTION: fill buffer structure with user buffer information.
+ * This also fills the individual stream buffers inside the batch buffer structure
+ *
+ * PARAMETERS :
+ * @buf_info : user buffer information
+ * @bufDef : buffer structure to fill with user buffer info
+ * @index : index of the buffer
+ * @plane_offset : frame length/offset info for the individual plane buffers
+ * @planeBufDef : [output] buffer definitions of the individual plane buffers
+ * @bufs : stream buffer object holding the plane buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMemory::getUserBufDef(const cam_stream_user_buf_info_t &buf_info,
+ mm_camera_buf_def_t &bufDef,
+ uint32_t index,
+ const cam_frame_len_offset_t &plane_offset,
+ mm_camera_buf_def_t *planeBufDef,
+ QCameraMemory *bufs) const
+{
+ struct msm_camera_user_buf_cont_t *cont_buf = NULL;
+ uint32_t plane_idx = (index * buf_info.frame_buf_cnt);
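+ /* Each batch (user) buffer aggregates frame_buf_cnt stream buffers;
+ plane_idx is the index of its first entry in the flat planeBufDef array. */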
+
+ if (!mBufferCount) {
+ LOGE("Memory not allocated");
+ return INVALID_OPERATION;
+ }
+
+ for (int count = 0; count < mBufferCount; count++) {
+ bufDef.fd = mMemInfo[count].fd;
+ bufDef.buf_type = CAM_STREAM_BUF_TYPE_USERPTR;
+ bufDef.frame_len = buf_info.size;
+ bufDef.mem_info = (void *)this;
+ bufDef.buffer = (void *)((uint8_t *)getPtr(count)
+ + (index * buf_info.size));
+ bufDef.buf_idx = index;
+ bufDef.user_buf.num_buffers = (int8_t)buf_info.frame_buf_cnt;
+ bufDef.user_buf.bufs_used = (int8_t)buf_info.frame_buf_cnt;
+
+ //Individual plane buffer structure to be filled
+ cont_buf = (struct msm_camera_user_buf_cont_t *)bufDef.buffer;
+ cont_buf->buf_cnt = bufDef.user_buf.num_buffers;
+
+ for (int i = 0; i < bufDef.user_buf.num_buffers; i++) {
+ bufs->getBufDef(plane_offset, planeBufDef[plane_idx], plane_idx);
+ bufDef.user_buf.buf_idx[i] = -1;
+ cont_buf->buf_idx[i] = planeBufDef[plane_idx].buf_idx;
+ plane_idx++;
+ }
+ bufDef.user_buf.plane_buf = planeBufDef;
+
+ LOGD("num_buf = %d index = %d plane_idx = %d",
+ bufDef.user_buf.num_buffers, index, plane_idx);
+ }
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : alloc
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ * @heap_id : heap id to indicate where the buffers will be allocated from
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::alloc(int count, size_t size, unsigned int heap_id,
+ uint32_t secure_mode)
+{
+ int rc = OK;
+
+ int new_bufCnt = mBufferCount + count;
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "Memsize", size, count);
+
+ if (new_bufCnt > MM_CAMERA_MAX_NUM_FRAMES) {
+ LOGE("Buffer count %d out of bound. Max is %d",
+ new_bufCnt, MM_CAMERA_MAX_NUM_FRAMES);
+ ATRACE_END();
+ return BAD_INDEX;
+ }
+
+ for (int i = mBufferCount; i < new_bufCnt; i ++) {
+ if ( NULL == mMemoryPool ) {
+ LOGH("No memory pool available, allocating now");
+ rc = allocOneBuffer(mMemInfo[i], heap_id, size, m_bCached,
+ secure_mode);
+ if (rc < 0) {
+ LOGE("AllocateIonMemory failed");
+ for (int j = i-1; j >= 0; j--)
+ deallocOneBuffer(mMemInfo[j]);
+ break;
+ }
+ } else {
+ rc = mMemoryPool->allocateBuffer(mMemInfo[i],
+ heap_id,
+ size,
+ m_bCached,
+ mStreamType,
+ secure_mode);
+ if (rc < 0) {
+ LOGE("Memory pool allocation failed");
+ for (int j = i-1; j >= 0; j--)
+ mMemoryPool->releaseBuffer(mMemInfo[j],
+ mStreamType);
+ break;
+ }
+ }
+
+ }
+ ATRACE_END();
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : dealloc
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemory::dealloc()
+{
+ for (int i = 0; i < mBufferCount; i++) {
+ if ( NULL == mMemoryPool ) {
+ deallocOneBuffer(mMemInfo[i]);
+ } else {
+ mMemoryPool->releaseBuffer(mMemInfo[i], mStreamType);
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : allocOneBuffer
+ *
+ * DESCRIPTION: impl of allocating one buffer of a certain size
+ *
+ * PARAMETERS :
+ * @memInfo : [output] reference to struct to store additional memory allocation info
+ * @heap_id : [input] heap id to indicate where the buffer will be allocated from
+ * @size : [input] length of the buffer to be allocated
+ * @cached : [input] flag whether buffer needs to be cached
+ * @secure_mode : [input] secure mode flag for the allocation
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::allocOneBuffer(QCameraMemInfo &memInfo,
+ unsigned int heap_id, size_t size, bool cached, uint32_t secure_mode)
+{
+ int rc = OK;
+ struct ion_handle_data handle_data;
+ struct ion_allocation_data alloc;
+ struct ion_fd_data ion_info_fd;
+ int main_ion_fd = -1;
+
+ main_ion_fd = open("/dev/ion", O_RDONLY);
+ if (main_ion_fd < 0) {
+ LOGE("Ion dev open failed: %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+
+ memset(&alloc, 0, sizeof(alloc));
+ alloc.len = size;
+ /* to make it page size aligned */
+ alloc.len = (alloc.len + 4095U) & (~4095U);
+ alloc.align = 4096;
+ if (cached) {
+ alloc.flags = ION_FLAG_CACHED;
+ }
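+ /* heap_id is a bitmask of candidate ION heaps
+ (e.g. 0x1 << ION_IOMMU_HEAP_ID, as used by the callers below). */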
+ alloc.heap_id_mask = heap_id;
+ if (secure_mode == SECURE) {
+ LOGD("Allocate secure buffer\n");
+ alloc.flags = ION_SECURE;
+ alloc.heap_id_mask = ION_HEAP(ION_CP_MM_HEAP_ID);
+ alloc.align = 1048576; // 1 MiB alignment to be able to protect later
+ alloc.len = (alloc.len + 1048575U) & (~1048575U);
+ }
+
+ rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+ if (rc < 0) {
+ LOGE("ION allocation failed: %s\n", strerror(errno));
+ goto ION_ALLOC_FAILED;
+ }
+
+ memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+ ion_info_fd.handle = alloc.handle;
+ rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+ if (rc < 0) {
+ LOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+
+ memInfo.main_ion_fd = main_ion_fd;
+ memInfo.fd = ion_info_fd.fd;
+ memInfo.handle = ion_info_fd.handle;
+ memInfo.size = alloc.len;
+ memInfo.cached = cached;
+ memInfo.heap_id = heap_id;
+
+ LOGD("ION buffer %lx with size %d allocated",
+ (unsigned long)memInfo.handle, alloc.len);
+ return OK;
+
+ION_MAP_FAILED:
+ memset(&handle_data, 0, sizeof(handle_data));
+ handle_data.handle = ion_info_fd.handle;
+ ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(main_ion_fd);
+ION_OPEN_FAILED:
+ return NO_MEMORY;
+}
+
+/*===========================================================================
+ * FUNCTION : deallocOneBuffer
+ *
+ * DESCRIPTION: impl of deallocating one buffer
+ *
+ * PARAMETERS :
+ * @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemory::deallocOneBuffer(QCameraMemInfo &memInfo)
+{
+ struct ion_handle_data handle_data;
+
+ if (memInfo.fd >= 0) {
+ close(memInfo.fd);
+ memInfo.fd = -1;
+ }
+
+ if (memInfo.main_ion_fd >= 0) {
+ memset(&handle_data, 0, sizeof(handle_data));
+ handle_data.handle = memInfo.handle;
+ ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+ close(memInfo.main_ion_fd);
+ memInfo.main_ion_fd = -1;
+ }
+ memInfo.handle = 0;
+ memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraMemoryPool
+ *
+ * DESCRIPTION: default constructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraMemoryPool::QCameraMemoryPool()
+{
+ pthread_mutex_init(&mLock, NULL);
+}
+
+
+/*===========================================================================
+ * FUNCTION : ~QCameraMemoryPool
+ *
+ * DESCRIPTION: destructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraMemoryPool::~QCameraMemoryPool()
+{
+ clear();
+ pthread_mutex_destroy(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION : releaseBuffer
+ *
+ * DESCRIPTION: release one buffer back to the pool cache
+ *
+ * PARAMETERS :
+ * @memInfo : reference to struct that stores additional memory allocation info
+ * @streamType: type of stream the buffer belongs to
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemoryPool::releaseBuffer(
+ struct QCameraMemory::QCameraMemInfo &memInfo,
+ cam_stream_type_t streamType)
+{
+ pthread_mutex_lock(&mLock);
+
+ mPools[streamType].push_back(memInfo);
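+ // The buffer is not freed here; it stays cached per stream type until
+ // clear() runs or a later allocateBuffer() call reuses it.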
+
+ pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION : clear
+ *
+ * DESCRIPTION: clears all cached buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMemoryPool::clear()
+{
+ pthread_mutex_lock(&mLock);
+
+ for (int i = CAM_STREAM_TYPE_DEFAULT; i < CAM_STREAM_TYPE_MAX; i++ ) {
+ List<struct QCameraMemory::QCameraMemInfo>::iterator it;
+ it = mPools[i].begin();
+ for( ; it != mPools[i].end() ; it++) {
+ QCameraMemory::deallocOneBuffer(*it);
+ }
+
+ mPools[i].clear();
+ }
+
+ pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION : findBufferLocked
+ *
+ * DESCRIPTION: search for an appropriate cached buffer
+ *
+ * PARAMETERS :
+ * @memInfo : reference to struct that stores additional memory allocation info
+ * @heap_id : type of heap
+ * @size : size of the buffer
+ * @cached : whether the buffer should be cached
+ * @streamType: type of stream this buffer belongs to
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::findBufferLocked(
+ struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+ size_t size, bool cached, cam_stream_type_t streamType)
+{
+ int rc = NAME_NOT_FOUND;
+
+ if (mPools[streamType].empty()) {
+ return NAME_NOT_FOUND;
+ }
+
+ List<struct QCameraMemory::QCameraMemInfo>::iterator it = mPools[streamType].begin();
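+ /* Offline reprocess buffers are reused only on an exact size match;
+ other stream types may reuse any cached buffer that is large enough. */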
+ if (streamType == CAM_STREAM_TYPE_OFFLINE_PROC) {
+ for( ; it != mPools[streamType].end() ; it++) {
+ if( ((*it).size == size) &&
+ ((*it).heap_id == heap_id) &&
+ ((*it).cached == cached) ) {
+ memInfo = *it;
+ LOGD("Found buffer %lx size %d",
+ (unsigned long)memInfo.handle, memInfo.size);
+ mPools[streamType].erase(it);
+ rc = NO_ERROR;
+ break;
+ }
+ }
+ } else {
+ for( ; it != mPools[streamType].end() ; it++) {
+ if(((*it).size >= size) &&
+ ((*it).heap_id == heap_id) &&
+ ((*it).cached == cached) ) {
+ memInfo = *it;
+ LOGD("Found buffer %lx size %d",
+ (unsigned long)memInfo.handle, memInfo.size);
+ mPools[streamType].erase(it);
+ rc = NO_ERROR;
+ break;
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateBuffer
+ *
+ * DESCRIPTION: allocates a buffer from the memory pool,
+ * it will re-use cached buffers if possible
+ *
+ * PARAMETERS :
+ * @memInfo : reference to struct that stores additional memory allocation info
+ * @heap_id : type of heap
+ * @size : size of the buffer
+ * @cached : whether the buffer should be cached
+ * @streamType: type of stream this buffer belongs to
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::allocateBuffer(
+ struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+ size_t size, bool cached, cam_stream_type_t streamType,
+ uint32_t secure_mode)
+{
+ int rc = NO_ERROR;
+
+ pthread_mutex_lock(&mLock);
+
+ rc = findBufferLocked(memInfo, heap_id, size, cached, streamType);
+ if (NAME_NOT_FOUND == rc ) {
+ LOGD("Buffer not found!");
+ rc = QCameraMemory::allocOneBuffer(memInfo, heap_id, size, cached,
+ secure_mode);
+ }
+
+ pthread_mutex_unlock(&mLock);
+
+ return rc;
+}
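+/* Illustrative usage sketch only -- the real callers are the QCameraMemory
+ * subclasses above, which receive the pool pointer in their constructors and
+ * route allocations through QCameraMemory::alloc(). Names such as 'len' are
+ * placeholders:
+ *
+ * QCameraMemoryPool pool;
+ * QCameraMemory::QCameraMemInfo info;
+ * unsigned int heap = 0x1 << ION_IOMMU_HEAP_ID;
+ * if (pool.allocateBuffer(info, heap, len, true, CAM_STREAM_TYPE_PREVIEW,
+ * NON_SECURE) == NO_ERROR) {
+ * // ... use info.fd / info.size ...
+ * pool.releaseBuffer(info, CAM_STREAM_TYPE_PREVIEW); // cache for reuse
+ * }
+ */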
+
+/*===========================================================================
+ * FUNCTION : QCameraHeapMemory
+ *
+ * DESCRIPTION: constructor of QCameraHeapMemory for ion memory used internally in HAL
+ *
+ * PARAMETERS :
+ * @cached : flag indicates if using cached memory
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraHeapMemory::QCameraHeapMemory(bool cached)
+ : QCameraMemory(cached)
+{
+ for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+ mPtr[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraHeapMemory
+ *
+ * DESCRIPTION: destructor of QCameraHeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraHeapMemory::~QCameraHeapMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ *
+ * RETURN : buffer ptr
+ *==========================================================================*/
+void *QCameraHeapMemory::getPtr(uint32_t index) const
+{
+ if (index >= mBufferCount) {
+ LOGE("index out of bound");
+ return (void *)BAD_INDEX;
+ }
+ return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+ int rc = -1;
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "HeapMemsize", size, count);
+ uint32_t heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
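+ // Secure buffers are only ION-allocated; non-secure heap buffers are also
+ // mmap()ed below so the HAL can access them directly.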
+ if (isSecure == SECURE) {
+ rc = alloc(count, size, heap_id_mask, SECURE);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+ } else {
+ rc = alloc(count, size, heap_id_mask, NON_SECURE);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ for (int i = 0; i < count; i ++) {
+ void *vaddr = mmap(NULL,
+ mMemInfo[i].size,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mMemInfo[i].fd, 0);
+ if (vaddr == MAP_FAILED) {
+ for (int j = i-1; j >= 0; j --) {
+ munmap(mPtr[j], mMemInfo[j].size);
+ mPtr[j] = NULL;
+ deallocOneBuffer(mMemInfo[j]);
+ }
+ // Deallocate remaining buffers that have already been allocated
+ for (int j = i; j < count; j++) {
+ deallocOneBuffer(mMemInfo[j]);
+ }
+ ATRACE_END();
+ return NO_MEMORY;
+ } else
+ mPtr[i] = vaddr;
+ }
+ }
+ if (rc == 0) {
+ mBufferCount = count;
+ }
+ ATRACE_END();
+ return OK;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocateMore(uint8_t count, size_t size)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "HeapMemsize", size, count);
+ unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+ int rc = alloc(count, size, heap_id_mask, NON_SECURE);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+ void *vaddr = mmap(NULL,
+ mMemInfo[i].size,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mMemInfo[i].fd, 0);
+ if (vaddr == MAP_FAILED) {
+ for (int j = i-1; j >= mBufferCount; j --) {
+ munmap(mPtr[j], mMemInfo[j].size);
+ mPtr[j] = NULL;
+ deallocOneBuffer(mMemInfo[j]);
+ }
+ ATRACE_END();
+ return NO_MEMORY;
+ } else {
+ mPtr[i] = vaddr;
+ }
+ }
+ mBufferCount = (uint8_t)(mBufferCount + count);
+ ATRACE_END();
+ return OK;
+}
+
+/*===========================================================================
+ * FUNCTION : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraHeapMemory::deallocate()
+{
+ for (int i = 0; i < mBufferCount; i++) {
+ munmap(mPtr[i], mMemInfo[i].size);
+ mPtr[i] = NULL;
+ }
+ dealloc();
+ mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ * @cmd : cache ops command
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+ if (index >= mBufferCount)
+ return BAD_INDEX;
+ return cacheOpsInternal(index, cmd, mPtr[index]);
+}
+
+/*===========================================================================
+ * FUNCTION : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ * @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::getRegFlags(uint8_t * /*regFlags*/) const
+{
+ return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ * @index : buffer index
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : camera memory ptr
+ * NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraHeapMemory::getMemory(uint32_t /*index*/, bool /*metadata*/) const
+{
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ * @opaque : opaque ptr
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : buffer index if match found,
+ * -1 if failed
+ *==========================================================================*/
+int QCameraHeapMemory::getMatchBufIndex(const void *opaque,
+ bool metadata) const
+{
+ int index = -1;
+ if (metadata) {
+ return -1;
+ }
+ for (int i = 0; i < mBufferCount; i++) {
+ if (mPtr[i] == opaque) {
+ index = i;
+ break;
+ }
+ }
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraMetadataStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraMetadataStreamMemory
+ * for ion memory used internally in HAL for metadata
+ *
+ * PARAMETERS :
+ * @cached : flag indicates if using cached memory
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraMetadataStreamMemory::QCameraMetadataStreamMemory(bool cached)
+ : QCameraHeapMemory(cached)
+{
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraMetadataStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraMetadataStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraMetadataStreamMemory::~QCameraMetadataStreamMemory()
+{
+ if (mBufferCount > 0) {
+ LOGH("%s, buf_cnt > 0, deallocate buffers now.\n", __func__);
+ deallocate();
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ * @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMetadataStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+ for (int i = 0; i < mBufferCount; i ++) {
+ regFlags[i] = 1;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraStreamMemory
+ * ION memory allocated directly from /dev/ion and shared with framework
+ *
+ * PARAMETERS :
+ * @memory : camera memory request ops table
+ * @cached : flag indicates if using cached memory
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraStreamMemory::QCameraStreamMemory(camera_request_memory memory,
+ bool cached,
+ QCameraMemoryPool *pool,
+ cam_stream_type_t streamType, __unused cam_stream_buf_type bufType)
+ :QCameraMemory(cached, pool, streamType),
+ mGetMemory(memory)
+{
+ for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+ mCameraMemory[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraStreamMemory::~QCameraStreamMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "StreamMemsize", size, count);
+ unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+ int rc = alloc(count, size, heap_id_mask, isSecure);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ for (int i = 0; i < count; i ++) {
+ if (isSecure == SECURE) {
+ mCameraMemory[i] = 0;
+ } else {
+ mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, this);
+ }
+ }
+ mBufferCount = count;
+ ATRACE_END();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocateMore(uint8_t count, size_t size)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "StreamMemsize", size, count);
+ unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+ int rc = alloc(count, size, heap_id_mask, NON_SECURE);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ for (int i = mBufferCount; i < mBufferCount + count; i++) {
+ mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, this);
+ }
+ mBufferCount = (uint8_t)(mBufferCount + count);
+ ATRACE_END();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraStreamMemory::deallocate()
+{
+ for (int i = 0; i < mBufferCount; i ++) {
+ if (mCameraMemory[i])
+ mCameraMemory[i]->release(mCameraMemory[i]);
+ mCameraMemory[i] = NULL;
+ }
+ dealloc();
+ mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ * @cmd : cache ops command
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+ if (index >= mBufferCount)
+ return BAD_INDEX;
+ return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ * @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+ for (int i = 0; i < mBufferCount; i ++)
+ regFlags[i] = 1;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ * @index : buffer index
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : camera memory ptr
+ * NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraStreamMemory::getMemory(uint32_t index,
+ bool metadata) const
+{
+ if (index >= mBufferCount || metadata)
+ return NULL;
+ return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ * @opaque : opaque ptr
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : buffer index if match found,
+ * -1 if failed
+ *==========================================================================*/
+int QCameraStreamMemory::getMatchBufIndex(const void *opaque,
+ bool metadata) const
+{
+ int index = -1;
+ if (metadata) {
+ return -1;
+ }
+ for (int i = 0; i < mBufferCount; i++) {
+ if (mCameraMemory[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ *
+ * RETURN : buffer ptr
+ *==========================================================================*/
+void *QCameraStreamMemory::getPtr(uint32_t index) const
+{
+ if (index >= mBufferCount) {
+ LOGE("index out of bound");
+ return (void *)BAD_INDEX;
+ }
+ if (mCameraMemory[index] == 0) {
+ return NULL;
+ }
+ return mCameraMemory[index]->data;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraVideoMemory
+ *
+ * DESCRIPTION: constructor of QCameraVideoMemory
+ * VideoStream buffers also include metadata buffers
+ *
+ * PARAMETERS :
+ * @memory : camera memory request ops table
+ * @cached : flag indicates if using cached ION memory
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraVideoMemory::QCameraVideoMemory(camera_request_memory memory,
+ bool cached, QCameraMemType bufType)
+ : QCameraStreamMemory(memory, cached)
+{
+ memset(mMetadata, 0, sizeof(mMetadata));
+ memset(mNativeHandle, 0, sizeof(mNativeHandle));
+ mMetaBufCount = 0;
+ mBufType = bufType;
+ //Set Default color conversion format
+ mUsage = private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
+
+ //Set Default frame format
+ mFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraVideoMemory
+ *
+ * DESCRIPTION: destructor of QCameraVideoMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraVideoMemory::~QCameraVideoMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "VideoMemsize", size, count);
+ int rc = QCameraStreamMemory::allocate(count, size, isSecure);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ if (!(mBufType & QCAMERA_MEM_TYPE_BATCH)) {
+ /*
+ * FDs = 1
+ * numInts = 5 //offset, size, usage, timestamp, format
+ */
+ rc = allocateMeta(count, 1, VIDEO_METADATA_NUM_INTS);
+ if (rc != NO_ERROR) {
+ ATRACE_END();
+ return rc;
+ }
+ for (int i = 0; i < count; i ++) {
+ native_handle_t *nh = mNativeHandle[i];
+ if (!nh) {
+ LOGE("Error in getting video native handle");
+ ATRACE_END();
+ return NO_MEMORY;
+ }
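+ // Fill the handle consumed by the encoder: data[0] = buffer fd, then
+ // offset, size, usage flags, timestamp placeholder and OMX color format.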
+ nh->data[0] = mMemInfo[i].fd;
+ nh->data[1] = 0;
+ nh->data[2] = (int)mMemInfo[i].size;
+ nh->data[3] = mUsage;
+ nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+ nh->data[5] = mFormat;
+ }
+ }
+ mBufferCount = count;
+ ATRACE_END();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocateMore(uint8_t count, size_t size)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %zu %d", "VideoMemsize", size, count);
+ int rc = QCameraStreamMemory::allocateMore(count, size);
+ if (rc < 0) {
+ ATRACE_END();
+ return rc;
+ }
+
+ if (!(mBufType & QCAMERA_MEM_TYPE_BATCH)) {
+ for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+ mMetadata[i] = mGetMemory(-1,
+ sizeof(media_metadata_buffer), 1, this);
+ if (!mMetadata[i]) {
+ LOGE("allocation of video metadata failed.");
+ for (int j = mBufferCount; j <= i-1; j ++) {
+ mMetadata[j]->release(mMetadata[j]);
+ mCameraMemory[j]->release(mCameraMemory[j]);
+ mCameraMemory[j] = NULL;
+ deallocOneBuffer(mMemInfo[j]);
+ }
+ ATRACE_END();
+ return NO_MEMORY;
+ }
+ media_metadata_buffer * packet =
+ (media_metadata_buffer *)mMetadata[i]->data;
+ //FDs = 1
+ //numInts = 5 (offset, size, usage, timestamp, format)
+ mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS);
+#ifdef USE_MEDIA_EXTENSIONS
+ packet->eType = kMetadataBufferTypeNativeHandleSource;
+ packet->pHandle = mNativeHandle[i];
+#else
+ packet->buffer_type = kMetadataBufferTypeCameraSource;
+ packet->meta_handle = mNativeHandle[i];
+#endif
+ native_handle_t *nh = mNativeHandle[i];
+ if (!nh) {
+ LOGE("Error in getting video native handle");
+ ATRACE_END();
+ return NO_MEMORY;
+ }
+ nh->data[0] = mMemInfo[i].fd;
+ nh->data[1] = 0;
+ nh->data[2] = (int)mMemInfo[i].size;
+ nh->data[3] = mUsage;
+ nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+ nh->data[5] = mFormat;
+ }
+ }
+ mBufferCount = (uint8_t)(mBufferCount + count);
+ mMetaBufCount = mBufferCount;
+ ATRACE_END();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateMeta
+ *
+ * DESCRIPTION: allocate video encoder metadata structure
+ *
+ * PARAMETERS :
+ * @buf_cnt : number of metadata buffers to allocate
+ * @numFDs : number of FDs per native handle
+ * @numInts : number of ints per FD in the native handle
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocateMeta(uint8_t buf_cnt, int numFDs, int numInts)
+{
+ int rc = NO_ERROR;
+
+ for (int i = 0; i < buf_cnt; i++) {
+ mMetadata[i] = mGetMemory(-1,
+ sizeof(media_metadata_buffer), 1, this);
+ if (!mMetadata[i]) {
+ LOGE("allocation of video metadata failed.");
+ for (int j = (i - 1); j >= 0; j--) {
+ if (NULL != mNativeHandle[j]) {
+ native_handle_delete(mNativeHandle[j]);
+ }
+ mMetadata[j]->release(mMetadata[j]);
+ }
+ return NO_MEMORY;
+ }
+ media_metadata_buffer *packet =
+ (media_metadata_buffer *)mMetadata[i]->data;
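+ // One native handle per metadata buffer: numFDs buffer fds plus
+ // numInts ints of per-fd metadata (offset, size, usage, timestamp, format).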
+ mNativeHandle[i] = native_handle_create(numFDs, (numInts * numFDs));
+ if (mNativeHandle[i] == NULL) {
+ LOGE("Error in getting video native handle");
+ // Release the metadata buffer of the failed index once, outside the
+ // cleanup loop, to avoid releasing it repeatedly.
+ mMetadata[i]->release(mMetadata[i]);
+ mMetadata[i] = NULL;
+ for (int j = (i - 1); j >= 0; j--) {
+ if (NULL != mNativeHandle[j]) {
+ native_handle_delete(mNativeHandle[j]);
+ }
+ mMetadata[j]->release(mMetadata[j]);
+ }
+ return NO_MEMORY;
+ }
+#ifdef USE_MEDIA_EXTENSIONS
+ packet->eType = kMetadataBufferTypeNativeHandleSource;
+ packet->pHandle = mNativeHandle[i];
+#else
+ packet->buffer_type = kMetadataBufferTypeCameraSource;
+ packet->meta_handle = mNativeHandle[i];
+#endif
+ }
+ mMetaBufCount = buf_cnt;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : deallocateMeta
+ *
+ * DESCRIPTION: deallocate video metadata buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocateMeta()
+{
+ for (int i = 0; i < mMetaBufCount; i++) {
+ native_handle_t *nh = mNativeHandle[i];
+ if (NULL != nh) {
+ if (native_handle_delete(nh)) {
+ LOGE("Unable to delete native handle");
+ }
+ } else {
+ LOGE("native handle not available");
+ }
+ mNativeHandle[i] = NULL;
+ mMetadata[i]->release(mMetadata[i]);
+ mMetadata[i] = NULL;
+ }
+ mMetaBufCount = 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocate()
+{
+ deallocateMeta();
+
+ QCameraStreamMemory::deallocate();
+ mBufferCount = 0;
+ mMetaBufCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ * @index : buffer index
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : camera memory ptr
+ * NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraVideoMemory::getMemory(uint32_t index,
+ bool metadata) const
+{
+ if (index >= mMetaBufCount || (!metadata && index >= mBufferCount))
+ return NULL;
+
+ if (metadata)
+ return mMetadata[index];
+ else
+ return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION : updateNativeHandle
+ *
+ * DESCRIPTION: Updating native handle pointer
+ *
+ * PARAMETERS :
+ * @index : buffer index
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : camera native handle ptr
+ * NULL if not supported or failed
+ *==========================================================================*/
+native_handle_t *QCameraVideoMemory::updateNativeHandle(uint32_t index, bool metadata)
+{
+ if (index >= mMetaBufCount || (!metadata && index >= mBufferCount)) {
+ return NULL;
+ }
+
+ native_handle_t *nh = NULL;
+ if (metadata && mMetadata[index] != NULL) {
+ media_metadata_buffer *packet =
+ (media_metadata_buffer *)mMetadata[index]->data;
+ nh = mNativeHandle[index];
+#ifdef USE_MEDIA_EXTENSIONS
+ packet->pHandle = nh;
+#else
+ packet->meta_handle = nh;
+#endif
+ }
+ return nh;
+}
+
+/*===========================================================================
+ * FUNCTION : closeNativeHandle
+ *
+ * DESCRIPTION: close video native handle
+ *
+ * PARAMETERS :
+ * @data : ptr to the video metadata buffer to be returned
+ * @metadata : true if the buffer is a video metadata buffer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(const void *data, bool metadata)
+{
+ int32_t rc = NO_ERROR;
+ int32_t index = -1;
+
+#ifdef USE_MEDIA_EXTENSIONS
+ camera_memory_t *video_mem = NULL;
+
+ if (metadata) {
+ index = getMatchBufIndex(data, metadata);
+ if (index < 0) {
+ LOGE("Invalid buffer");
+ return BAD_VALUE;
+ }
+ video_mem = getMemory(index, metadata);
+ media_metadata_buffer * packet = NULL;
+ if (video_mem) {
+ packet = (media_metadata_buffer *)video_mem->data;
+ }
+
+ if (packet != NULL && packet->eType ==
+ kMetadataBufferTypeNativeHandleSource) {
+ native_handle_close(packet->pHandle);
+ native_handle_delete(packet->pHandle);
+ packet->pHandle = NULL;
+ } else {
+ LOGE("Invalid Data. Could not release");
+ return BAD_VALUE;
+ }
+ } else {
+ LOGE("Not of type video meta buffer. Failed");
+ return BAD_VALUE;
+ }
+#endif
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ * @opaque : opaque ptr
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : buffer index if match found,
+ * -1 if failed
+ *==========================================================================*/
+int QCameraVideoMemory::getMatchBufIndex(const void *opaque,
+ bool metadata) const
+{
+ int index = -1;
+
+ if (metadata) {
+ for (int i = 0; i < mMetaBufCount; i++) {
+ if (mMetadata[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ } else {
+ for (int i = 0; i < mBufferCount; i++) {
+ if (mCameraMemory[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ }
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoInfo
+ *
+ * DESCRIPTION: set usage flags and color format for video buffers
+ *
+ * PARAMETERS :
+ * @usage : usage bit for video
+ * @format : frame format in cam_format_t type
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraVideoMemory::setVideoInfo(int usage, cam_format_t format)
+{
+ mUsage |= usage;
+ mFormat = convCamtoOMXFormat(format);
+}
+
+/*===========================================================================
+ * FUNCTION : convCamtoOMXFormat
+ *
+ * DESCRIPTION: map cam_format_t to corresponding OMX format
+ *
+ * PARAMETERS :
+ * @format : format in cam_format_t type
+ *
+ * RETURN : omx format
+ *==========================================================================*/
+int QCameraVideoMemory::convCamtoOMXFormat(cam_format_t format)
+{
+ int omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ switch (format) {
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ omxFormat = QOMX_COLOR_FormatYVU420SemiPlanar;
+ break;
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ break;
+#ifndef VANILLA_HAL
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+ omxFormat = QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mCompressed;
+ break;
+#endif
+ default:
+ omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ }
+ return omxFormat;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraGrallocMemory
+ *
+ * DESCRIPTION: constructor of QCameraGrallocMemory
+ * preview stream buffers are allocated from the gralloc native window
+ *
+ * PARAMETERS :
+ * @memory : camera memory request ops table
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraGrallocMemory::QCameraGrallocMemory(camera_request_memory memory)
+ : QCameraMemory(true), mColorSpace(ITU_R_601_FR)
+{
+ mMinUndequeuedBuffers = 0;
+ mMappableBuffers = 0;
+ mWindow = NULL;
+ mWidth = mHeight = mStride = mScanline = mUsage = 0;
+ mFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+ mGetMemory = memory;
+ for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++) {
+ mBufferHandle[i] = NULL;
+ mLocalFlag[i] = BUFFER_NOT_OWNED;
+ mPrivateHandle[i] = NULL;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraGrallocMemory
+ *
+ * DESCRIPTION: destructor of QCameraGrallocMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraGrallocMemory::~QCameraGrallocMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION : setWindowInfo
+ *
+ * DESCRIPTION: set native window gralloc ops table
+ *
+ * PARAMETERS :
+ * @window : gralloc ops table ptr
+ * @width : width of preview frame
+ * @height : height of preview frame
+ * @stride : stride of preview frame
+ * @scanline: scanline of preview frame
+ * @format : format of preview image
+ * @maxFPS : max fps of preview stream
+ * @usage : usage bit for gralloc
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setWindowInfo(preview_stream_ops_t *window,
+ int width, int height, int stride, int scanline, int format, int maxFPS, int usage)
+{
+ mWindow = window;
+ mWidth = width;
+ mHeight = height;
+ mStride = stride;
+ mScanline = scanline;
+ mFormat = format;
+ mUsage = usage;
+ setMaxFPS(maxFPS);
+}
+
+/*===========================================================================
+ * FUNCTION : setMaxFPS
+ *
+ * DESCRIPTION: set max fps
+ *
+ * PARAMETERS :
+ * @maxFPS : max fps of preview stream
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setMaxFPS(int maxFPS)
+{
+ /* input will be in multiples of 1000 */
+ maxFPS = (maxFPS + 500)/1000;
+
+ /* Always cap the lower bound at 30 fps: runtime fps updates are not
+ propagated to the display, so advertising a lower initial rate (e.g. a 15 fps
+ maximum later raised to 30) could cause MDP underruns. */
+ if (maxFPS < 30) {
+ maxFPS = 30;
+ }
+
+ /* the new fps will be updated in metadata of the next frame enqueued to display*/
+ mMaxFPS = maxFPS;
+ LOGH("Setting max fps %d to display", mMaxFPS);
+}
+
+/*===========================================================================
+ * FUNCTION : displayBuffer
+ *
+ * DESCRIPTION: send received frame to display
+ *
+ * PARAMETERS :
+ * @index : index of preview frame
+ *
+ * RETURN : int32_t
+ * index of the dequeued buffer on success
+ * negative error code on failure
+ *==========================================================================*/
+int QCameraGrallocMemory::displayBuffer(uint32_t index)
+{
+ int err = NO_ERROR;
+ int dequeuedIdx = BAD_INDEX;
+
+ if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+ LOGE("buffer to be enqueued is not owned");
+ return INVALID_OPERATION;
+ }
+
+ err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+ if(err != 0) {
+ LOGE("enqueue_buffer failed, err = %d", err);
+ } else {
+ LOGD("enqueue_buffer hdl=%p", *mBufferHandle[index]);
+ mLocalFlag[index] = BUFFER_NOT_OWNED;
+ }
+
+ buffer_handle_t *buffer_handle = NULL;
+ int stride = 0;
+ err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+ if (err == NO_ERROR && buffer_handle != NULL) {
+ int i;
+ LOGD("dequed buf hdl =%p", *buffer_handle);
+ for(i = 0; i < mMappableBuffers; i++) {
+ if(mBufferHandle[i] == buffer_handle) {
+ LOGD("Found buffer in idx:%d", i);
+ mLocalFlag[i] = BUFFER_OWNED;
+ dequeuedIdx = i;
+ break;
+ }
+ }
+
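+ // First time this window buffer is seen: import its ION handle and wrap
+ // it in a camera_memory_t so it can be used as a camera buffer from now on.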
+ if ((dequeuedIdx == BAD_INDEX) && (mMappableBuffers < mBufferCount)) {
+ dequeuedIdx = mMappableBuffers;
+ LOGD("Placing buffer in idx:%d", dequeuedIdx);
+ mBufferHandle[dequeuedIdx] = buffer_handle;
+ mLocalFlag[dequeuedIdx] = BUFFER_OWNED;
+
+ mPrivateHandle[dequeuedIdx] =
+ (struct private_handle_t *)(*mBufferHandle[dequeuedIdx]);
+ mMemInfo[dequeuedIdx].main_ion_fd = open("/dev/ion", O_RDONLY);
+ if (mMemInfo[dequeuedIdx].main_ion_fd < 0) {
+ LOGE("failed: could not open ion device");
+ return BAD_INDEX;
+ }
+
+ struct ion_fd_data ion_info_fd;
+ memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+ ion_info_fd.fd = mPrivateHandle[dequeuedIdx]->fd;
+ if (ioctl(mMemInfo[dequeuedIdx].main_ion_fd,
+ ION_IOC_IMPORT, &ion_info_fd) < 0) {
+ LOGE("ION import failed\n");
+ return BAD_INDEX;
+ }
+
+ mCameraMemory[dequeuedIdx] =
+ mGetMemory(mPrivateHandle[dequeuedIdx]->fd,
+ (size_t)mPrivateHandle[dequeuedIdx]->size,
+ 1,
+ (void *)this);
+ LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+ dequeuedIdx, mPrivateHandle[dequeuedIdx]->fd,
+ mPrivateHandle[dequeuedIdx]->size,
+ mPrivateHandle[dequeuedIdx]->offset);
+ mMemInfo[dequeuedIdx].fd = mPrivateHandle[dequeuedIdx]->fd;
+ mMemInfo[dequeuedIdx].size =
+ (size_t)mPrivateHandle[dequeuedIdx]->size;
+ mMemInfo[dequeuedIdx].handle = ion_info_fd.handle;
+
+ mMappableBuffers++;
+ }
+ } else {
+ LOGW("dequeue_buffer, no free buffer from display now");
+ }
+ return dequeuedIdx;
+}
+
+/*===========================================================================
+ * FUNCTION : enqueueBuffer
+ *
+ * DESCRIPTION: enqueue camera frame to display
+ *
+ * PARAMETERS :
+ * @index : index of frame
+ * @timeStamp : frame presentation time
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraGrallocMemory::enqueueBuffer(uint32_t index, nsecs_t timeStamp)
+{
+ int32_t err = NO_ERROR;
+
+ if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+ LOGE("buffer to be enqueued is not owned");
+ return INVALID_OPERATION;
+ }
+
+ if (timeStamp != 0) {
+ err = mWindow->set_timestamp(mWindow, timeStamp);
+ if (err != NO_ERROR){
+ LOGE("Failed to native window timestamp");
+ }
+ }
+
+ err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+ if(err != 0) {
+ LOGE("enqueue_buffer failed, err = %d", err);
+ } else {
+ LOGD("enqueue_buffer hdl=%p", *mBufferHandle[index]);
+ mLocalFlag[index] = BUFFER_NOT_OWNED;
+ }
+ return err;
+}
+
+/*===========================================================================
+ * FUNCTION : dequeueBuffer
+ *
+ * DESCRIPTION: receive a buffer from gralloc
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t
+ * NO_ERROR/Buffer index : Success
+ * < 0 failure code
+ *==========================================================================*/
+int32_t QCameraGrallocMemory::dequeueBuffer()
+{
+ int32_t err = NO_ERROR;
+ int32_t dequeuedIdx = BAD_INDEX;
+ buffer_handle_t *buffer_handle = NULL;
+ int32_t stride = 0;
+
+ dequeuedIdx = BAD_INDEX;
+ err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+ if ((err == NO_ERROR) && (buffer_handle != NULL)) {
+ int i;
+ LOGD("dequed buf hdl =%p", *buffer_handle);
+ for(i = 0; i < mMappableBuffers; i++) {
+ if(mBufferHandle[i] == buffer_handle) {
+ LOGD("Found buffer in idx:%d", i);
+ mLocalFlag[i] = BUFFER_OWNED;
+ dequeuedIdx = i;
+ break;
+ }
+ }
+
+ if ((dequeuedIdx == BAD_INDEX) &&
+ (mMappableBuffers < mBufferCount)) {
+ dequeuedIdx = mMappableBuffers;
+ LOGD("Placing buffer in idx:%d", dequeuedIdx);
+ mBufferHandle[dequeuedIdx] = buffer_handle;
+ mLocalFlag[dequeuedIdx] = BUFFER_OWNED;
+
+ mPrivateHandle[dequeuedIdx] =
+ (struct private_handle_t *)(*mBufferHandle[dequeuedIdx]);
+ //update max fps info
+ setMetaData(mPrivateHandle[dequeuedIdx], UPDATE_REFRESH_RATE, (void*)&mMaxFPS);
+ mMemInfo[dequeuedIdx].main_ion_fd = open("/dev/ion", O_RDONLY);
+ if (mMemInfo[dequeuedIdx].main_ion_fd < 0) {
+ LOGE("failed: could not open ion device");
+ return BAD_INDEX;
+ }
+
+ struct ion_fd_data ion_info_fd;
+ memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+ ion_info_fd.fd = mPrivateHandle[dequeuedIdx]->fd;
+ if (ioctl(mMemInfo[dequeuedIdx].main_ion_fd,
+ ION_IOC_IMPORT, &ion_info_fd) < 0) {
+ LOGE("ION import failed\n");
+ return BAD_INDEX;
+ }
+
+ setMetaData(mPrivateHandle[dequeuedIdx], UPDATE_COLOR_SPACE,
+ &mColorSpace);
+ mCameraMemory[dequeuedIdx] =
+ mGetMemory(mPrivateHandle[dequeuedIdx]->fd,
+ (size_t)mPrivateHandle[dequeuedIdx]->size,
+ 1,
+ (void *)this);
+ LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+ dequeuedIdx, mPrivateHandle[dequeuedIdx]->fd,
+ mPrivateHandle[dequeuedIdx]->size,
+ mPrivateHandle[dequeuedIdx]->offset);
+ mMemInfo[dequeuedIdx].fd = mPrivateHandle[dequeuedIdx]->fd;
+ mMemInfo[dequeuedIdx].size =
+ (size_t)mPrivateHandle[dequeuedIdx]->size;
+ mMemInfo[dequeuedIdx].handle = ion_info_fd.handle;
+
+ mMappableBuffers++;
+ }
+ } else {
+ LOGW("dequeue_buffer, no free buffer from display now");
+ }
+
+ return dequeuedIdx;
+}
+
+
+/*===========================================================================
+ * FUNCTION : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocate(uint8_t count, size_t /*size*/,
+ uint32_t /*isSecure*/)
+{
+ ATRACE_BEGIN_SNPRINTF("%s %d", "Grallocbufcnt", count);
+ int err = 0;
+ status_t ret = NO_ERROR;
+ int gralloc_usage = 0;
+ struct ion_fd_data ion_info_fd;
+ memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+ LOGD("E ");
+
+ if (!mWindow) {
+ LOGE("Invalid native window");
+ ATRACE_END();
+ ret = INVALID_OPERATION;
+ goto end;
+ }
+
+ // Increment buffer count by min undequeued buffer.
+ err = mWindow->get_min_undequeued_buffer_count(mWindow,&mMinUndequeuedBuffers);
+ if (err != 0) {
+ LOGE("get_min_undequeued_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+
+ err = mWindow->set_buffer_count(mWindow, count);
+ if (err != 0) {
+ LOGE("set_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+
+ err = mWindow->set_buffers_geometry(mWindow, mWidth, mHeight, mFormat);
+ if (err != 0) {
+ LOGE("set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+
+ gralloc_usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
+ gralloc_usage |= mUsage;
+ err = mWindow->set_usage(mWindow, gralloc_usage);
+ if(err != 0) {
+ /* set_usage error out */
+ LOGE("set_usage rc = %d", err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ LOGH("usage = %d, geometry: %p, %d, %d, %d, %d, %d",
+ gralloc_usage, mWindow, mWidth, mHeight, mStride,
+ mScanline, mFormat);
+
+ mBufferCount = count;
+ if ((count < mMappableBuffers) || (mMappableBuffers == 0)) {
+ mMappableBuffers = count;
+ }
+
+ //Allocate cnt number of buffers from native window
+ for (int cnt = 0; cnt < mMappableBuffers; cnt++) {
+ int stride;
+ err = mWindow->dequeue_buffer(mWindow, &mBufferHandle[cnt], &stride);
+ if(!err) {
+ LOGD("dequeue buf hdl =%p", mBufferHandle[cnt]);
+ mLocalFlag[cnt] = BUFFER_OWNED;
+ } else {
+ mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+ LOGE("dequeue_buffer idx = %d err = %d", cnt, err);
+ }
+
+ LOGD("dequeue buf: %p\n", mBufferHandle[cnt]);
+
+ if(err != 0) {
+ LOGE("dequeue_buffer failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ for(int i = 0; i < cnt; i++) {
+ // dequeue_buffer failed (native window may be gone); release buffers dequeued so far
+ struct ion_handle_data ion_handle;
+ memset(&ion_handle, 0, sizeof(ion_handle));
+ ion_handle.handle = mMemInfo[i].handle;
+ if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+ ALOGE("ion free failed");
+ }
+ close(mMemInfo[i].main_ion_fd);
+
+ if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+ err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+ LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+ }
+ mLocalFlag[i] = BUFFER_NOT_OWNED;
+ mBufferHandle[i] = NULL;
+ }
+ reset();
+ goto end;
+ }
+
+ mPrivateHandle[cnt] =
+ (struct private_handle_t *)(*mBufferHandle[cnt]);
+ //update max fps info
+ setMetaData(mPrivateHandle[cnt], UPDATE_REFRESH_RATE, (void*)&mMaxFPS);
+ mMemInfo[cnt].main_ion_fd = open("/dev/ion", O_RDONLY);
+ if (mMemInfo[cnt].main_ion_fd < 0) {
+ LOGE("failed: could not open ion device");
+ for(int i = 0; i < cnt; i++) {
+ struct ion_handle_data ion_handle;
+ memset(&ion_handle, 0, sizeof(ion_handle));
+ ion_handle.handle = mMemInfo[i].handle;
+ if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+ LOGE("ion free failed");
+ }
+ close(mMemInfo[i].main_ion_fd);
+ if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+ err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+ LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+ }
+ mLocalFlag[i] = BUFFER_NOT_OWNED;
+ mBufferHandle[i] = NULL;
+ }
+ reset();
+ ret = UNKNOWN_ERROR;
+ goto end;
+ } else {
+ ion_info_fd.fd = mPrivateHandle[cnt]->fd;
+ if (ioctl(mMemInfo[cnt].main_ion_fd,
+ ION_IOC_IMPORT, &ion_info_fd) < 0) {
+ LOGE("ION import failed\n");
+ for(int i = 0; i < cnt; i++) {
+ struct ion_handle_data ion_handle;
+ memset(&ion_handle, 0, sizeof(ion_handle));
+ ion_handle.handle = mMemInfo[i].handle;
+ if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+ LOGE("ion free failed");
+ }
+ close(mMemInfo[i].main_ion_fd);
+
+ if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+ err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+ LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+ }
+ mLocalFlag[i] = BUFFER_NOT_OWNED;
+ mBufferHandle[i] = NULL;
+ }
+ close(mMemInfo[cnt].main_ion_fd);
+ reset();
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ }
+ setMetaData(mPrivateHandle[cnt], UPDATE_COLOR_SPACE, &mColorSpace);
+ mCameraMemory[cnt] =
+ mGetMemory(mPrivateHandle[cnt]->fd,
+ (size_t)mPrivateHandle[cnt]->size,
+ 1,
+ (void *)this);
+ LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+ cnt, mPrivateHandle[cnt]->fd,
+ mPrivateHandle[cnt]->size,
+ mPrivateHandle[cnt]->offset);
+ mMemInfo[cnt].fd = mPrivateHandle[cnt]->fd;
+ mMemInfo[cnt].size = (size_t)mPrivateHandle[cnt]->size;
+ mMemInfo[cnt].handle = ion_info_fd.handle;
+ }
+
+ //Cancel min_undequeued_buffer buffers back to the window
+ for (int i = 0; i < mMinUndequeuedBuffers; i ++) {
+ err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+ mLocalFlag[i] = BUFFER_NOT_OWNED;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ mMappableBuffers = 0;
+ }
+ LOGD("X ");
+ ATRACE_END();
+ return ret;
+}
+
+
+/*===========================================================================
+ * FUNCTION : allocateMore
+ *
+ * DESCRIPTION: allocate an additional number of buffers of a certain size
+ *
+ * PARAMETERS :
+ * @count : number of buffers to be allocated
+ * @size : length of the buffer to be allocated
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocateMore(uint8_t /*count*/, size_t /*size*/)
+{
+ LOGE("Not implenmented yet");
+ return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraGrallocMemory::deallocate()
+{
+ LOGD("E ", __FUNCTION__);
+
+ for (int cnt = 0; cnt < mMappableBuffers; cnt++) {
+ mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+ struct ion_handle_data ion_handle;
+ memset(&ion_handle, 0, sizeof(ion_handle));
+ ion_handle.handle = mMemInfo[cnt].handle;
+ if (ioctl(mMemInfo[cnt].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+ LOGE("ion free failed");
+ }
+ close(mMemInfo[cnt].main_ion_fd);
+ if(mLocalFlag[cnt] != BUFFER_NOT_OWNED) {
+ if (mWindow) {
+ mWindow->cancel_buffer(mWindow, mBufferHandle[cnt]);
+ LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[cnt]));
+ } else {
+ LOGE("Preview window is NULL, cannot cancel_buffer: hdl =%p",
+ (*mBufferHandle[cnt]));
+ }
+ }
+ mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+ LOGH("put buffer %d successfully", cnt);
+ }
+ mBufferCount = 0;
+ mMappableBuffers = 0;
+ LOGD("X ",__FUNCTION__);
+}
+
+/*===========================================================================
+ * FUNCTION : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ * @cmd : cache ops command
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+ if (index >= mMappableBuffers)
+ return BAD_INDEX;
+ return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ * @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::getRegFlags(uint8_t *regFlags) const
+{
+ int i = 0;
+ for (i = 0; i < mMinUndequeuedBuffers; i ++)
+ regFlags[i] = 0;
+ for (; i < mMappableBuffers; i ++)
+ regFlags[i] = 1;
+ for (; i < mBufferCount; i ++)
+ regFlags[i] = 0;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ * @index : buffer index
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : camera memory ptr
+ * NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraGrallocMemory::getMemory(uint32_t index,
+ bool metadata) const
+{
+ if (index >= mMappableBuffers || metadata)
+ return NULL;
+ return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ * @opaque : opaque ptr
+ * @metadata: flag if it's metadata
+ *
+ * RETURN : buffer index if match found,
+ * -1 if failed
+ *==========================================================================*/
+int QCameraGrallocMemory::getMatchBufIndex(const void *opaque,
+ bool metadata) const
+{
+ int index = -1;
+ if (metadata) {
+ return -1;
+ }
+ for (int i = 0; i < mMappableBuffers; i++) {
+ if (mCameraMemory[i]->data == opaque) {
+ index = i;
+ break;
+ }
+ }
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer
+ *
+ * RETURN : buffer ptr
+ *==========================================================================*/
+void *QCameraGrallocMemory::getPtr(uint32_t index) const
+{
+ if (index >= mMappableBuffers) {
+ LOGE("index out of bound");
+ return (void *)BAD_INDEX;
+ }
+ return mCameraMemory[index]->data;
+}
+
+/*===========================================================================
+ * FUNCTION : setMappable
+ *
+ * DESCRIPTION: configure the number of buffers ready to map
+ *
+ * PARAMETERS :
+ * @mappable : the number of desired mappable buffers
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setMappable(uint8_t mappable)
+{
+ if (mMappableBuffers == 0) {
+ mMappableBuffers = mappable;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMappable
+ *
+ * DESCRIPTION: query number of buffers already allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of buffers already allocated
+ *==========================================================================*/
+uint8_t QCameraGrallocMemory::getMappable() const
+{
+ return mMappableBuffers;
+}
+
+/*===========================================================================
+ * FUNCTION : checkIfAllBuffersMapped
+ *
+ * DESCRIPTION: check if all buffers have been mapped
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : 1 if all buffers mapped
+ * 0 if total buffers not equal to mapped buffers
+ *==========================================================================*/
+uint8_t QCameraGrallocMemory::checkIfAllBuffersMapped() const
+{
+ LOGH("mBufferCount: %d, mMappableBuffers: %d",
+ mBufferCount, mMappableBuffers);
+ return (mBufferCount == mMappableBuffers);
+}
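+
+/*===========================================================================
+ * NOTE : illustrative setup sketch only, not part of the implementation.
+ *
+ * Based on the QCameraGrallocMemory interface implemented above, a preview
+ * owner would roughly configure and allocate the window buffers as below.
+ * `previewMem`, the geometry values and `previewFormat` are placeholders:
+ *
+ *   QCameraGrallocMemory previewMem(getMemoryCb);
+ *   previewMem.setWindowInfo(window, width, height, stride, scanline,
+ *           previewFormat, maxFPS, usage);
+ *   if (previewMem.allocate(bufCount, 0, 0) == NO_ERROR) {
+ *       uint8_t regFlags[MM_CAMERA_MAX_NUM_FRAMES];
+ *       previewMem.getRegFlags(regFlags); // which buffers to register first
+ *   }
+ *==========================================================================*/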
+
+
+}; //namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraMem.h b/camera/QCamera2/HAL/QCameraMem.h
new file mode 100644
index 0000000..a20e29a
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMem.h
@@ -0,0 +1,295 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HWI_MEM_H__
+#define __QCAMERA2HWI_MEM_H__
+
+// System dependencies
+#include <linux/msm_ion.h>
+#include <utils/Mutex.h>
+#include <utils/List.h>
+
+// Display dependencies
+#include "qdMetaData.h"
+
+// Camera dependencies
+#include "camera.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraMemoryPool;
+
+//OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+#define VIDEO_METADATA_NUM_INTS 5
+
+enum QCameraMemType {
+ QCAMERA_MEM_TYPE_DEFAULT = 0,
+ QCAMERA_MEM_TYPE_SECURE = 1,
+ QCAMERA_MEM_TYPE_BATCH = (1 << 1),
+ QCAMERA_MEM_TYPE_COMPRESSED = (1 << 2),
+};
+
+// Base class for all memory types. Abstract.
+class QCameraMemory {
+
+public:
+ int cleanCache(uint32_t index)
+ {
+ return cacheOps(index, ION_IOC_CLEAN_CACHES);
+ }
+ int invalidateCache(uint32_t index)
+ {
+ return cacheOps(index, ION_IOC_INV_CACHES);
+ }
+ int cleanInvalidateCache(uint32_t index)
+ {
+ return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);
+ }
+ int getFd(uint32_t index) const;
+ ssize_t getSize(uint32_t index) const;
+ uint8_t getCnt() const;
+ virtual uint8_t getMappable() const;
+ virtual uint8_t checkIfAllBuffersMapped() const;
+
+ virtual int allocate(uint8_t count, size_t size, uint32_t is_secure) = 0;
+ virtual void deallocate() = 0;
+ virtual int allocateMore(uint8_t count, size_t size) = 0;
+ virtual int cacheOps(uint32_t index, unsigned int cmd) = 0;
+ virtual int getRegFlags(uint8_t *regFlags) const = 0;
+ virtual camera_memory_t *getMemory(uint32_t index,
+ bool metadata) const = 0;
+ virtual int getMatchBufIndex(const void *opaque, bool metadata) const = 0;
+ virtual void *getPtr(uint32_t index) const = 0;
+
+ QCameraMemory(bool cached,
+ QCameraMemoryPool *pool = NULL,
+ cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT,
+ QCameraMemType buf_Type = QCAMERA_MEM_TYPE_DEFAULT);
+ virtual ~QCameraMemory();
+ virtual void reset();
+
+ void getBufDef(const cam_frame_len_offset_t &offset,
+ mm_camera_buf_def_t &bufDef, uint32_t index) const;
+
+ int32_t getUserBufDef(const cam_stream_user_buf_info_t &buf_info,
+ mm_camera_buf_def_t &bufDef, uint32_t index,
+ const cam_frame_len_offset_t &plane_offset,
+ mm_camera_buf_def_t *planebufDef, QCameraMemory *bufs) const;
+
+protected:
+
+ friend class QCameraMemoryPool;
+
+ struct QCameraMemInfo {
+ int fd;
+ int main_ion_fd;
+ ion_user_handle_t handle;
+ size_t size;
+ bool cached;
+ unsigned int heap_id;
+ };
+
+ int alloc(int count, size_t size, unsigned int heap_id,
+ uint32_t is_secure);
+ void dealloc();
+ static int allocOneBuffer(struct QCameraMemInfo &memInfo,
+ unsigned int heap_id, size_t size, bool cached, uint32_t is_secure);
+ static void deallocOneBuffer(struct QCameraMemInfo &memInfo);
+ int cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr);
+
+ bool m_bCached;
+ uint8_t mBufferCount;
+ struct QCameraMemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+ QCameraMemoryPool *mMemoryPool;
+ cam_stream_type_t mStreamType;
+ QCameraMemType mBufType;
+};
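+
+/*===========================================================================
+ * NOTE : illustrative cache-maintenance sketch only, not part of the API.
+ *
+ * cleanCache()/invalidateCache() above are thin wrappers over cacheOps().
+ * A rough caller-side pattern, with `mem` and `idx` as placeholder names:
+ *
+ *   mem->invalidateCache(idx); // before the CPU reads a HW-written buffer
+ *   // ... CPU access via mem->getPtr(idx) ...
+ *   mem->cleanCache(idx);      // after CPU writes, before HW consumes it
+ *==========================================================================*/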
+
+class QCameraMemoryPool {
+
+public:
+
+ QCameraMemoryPool();
+ virtual ~QCameraMemoryPool();
+
+ int allocateBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+ unsigned int heap_id, size_t size, bool cached,
+ cam_stream_type_t streamType, uint32_t is_secure);
+ void releaseBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+ cam_stream_type_t streamType);
+ void clear();
+
+protected:
+
+ int findBufferLocked(struct QCameraMemory::QCameraMemInfo &memInfo,
+ unsigned int heap_id, size_t size, bool cached,
+ cam_stream_type_t streamType);
+
+ android::List<QCameraMemory::QCameraMemInfo> mPools[CAM_STREAM_TYPE_MAX];
+ pthread_mutex_t mLock;
+};
+
+// Internal heap memory is used for buffers that are used internally only.
+// They are allocated from /dev/ion.
+class QCameraHeapMemory : public QCameraMemory {
+public:
+ QCameraHeapMemory(bool cached);
+ virtual ~QCameraHeapMemory();
+
+ virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+ virtual int allocateMore(uint8_t count, size_t size);
+ virtual void deallocate();
+ virtual int cacheOps(uint32_t index, unsigned int cmd);
+ virtual int getRegFlags(uint8_t *regFlags) const;
+ virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+ virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+ virtual void *getPtr(uint32_t index) const;
+
+private:
+ void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+class QCameraMetadataStreamMemory : public QCameraHeapMemory {
+public:
+ QCameraMetadataStreamMemory(bool cached);
+ virtual ~QCameraMetadataStreamMemory();
+
+ virtual int getRegFlags(uint8_t *regFlags) const;
+};
+
+// External heap memory is used for buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraStreamMemory : public QCameraMemory {
+public:
+ QCameraStreamMemory(camera_request_memory getMemory,
+ bool cached,
+ QCameraMemoryPool *pool = NULL,
+ cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT,
+ cam_stream_buf_type buf_Type = CAM_STREAM_BUF_TYPE_MPLANE);
+ virtual ~QCameraStreamMemory();
+
+ virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+ virtual int allocateMore(uint8_t count, size_t size);
+ virtual void deallocate();
+ virtual int cacheOps(uint32_t index, unsigned int cmd);
+ virtual int getRegFlags(uint8_t *regFlags) const;
+ virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+ virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+ virtual void *getPtr(uint32_t index) const;
+
+protected:
+ camera_request_memory mGetMemory;
+ camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory is used for buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraVideoMemory : public QCameraStreamMemory {
+public:
+ QCameraVideoMemory(camera_request_memory getMemory, bool cached,
+ QCameraMemType bufType = QCAMERA_MEM_TYPE_DEFAULT);
+ virtual ~QCameraVideoMemory();
+
+ virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+ virtual int allocateMore(uint8_t count, size_t size);
+ virtual void deallocate();
+ virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+ virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+ int allocateMeta(uint8_t buf_cnt, int numFDs, int numInts);
+ void deallocateMeta();
+ void setVideoInfo(int usage, cam_format_t format);
+ int getUsage(){return mUsage;};
+ int getFormat(){return mFormat;};
+ int convCamtoOMXFormat(cam_format_t format);
+ native_handle_t *updateNativeHandle(uint32_t index, bool metadata = true);
+ int closeNativeHandle(const void *data, bool metadata = true);
+private:
+ camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
+ uint8_t mMetaBufCount;
+ int mUsage, mFormat;
+ native_handle_t *mNativeHandle[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+
+// Gralloc Memory is acquired from preview window
+class QCameraGrallocMemory : public QCameraMemory {
+ enum {
+ BUFFER_NOT_OWNED,
+ BUFFER_OWNED,
+ };
+public:
+ QCameraGrallocMemory(camera_request_memory getMemory);
+ void setNativeWindow(preview_stream_ops_t *anw);
+ virtual ~QCameraGrallocMemory();
+
+ virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+ virtual int allocateMore(uint8_t count, size_t size);
+ virtual void deallocate();
+ virtual int cacheOps(uint32_t index, unsigned int cmd);
+ virtual int getRegFlags(uint8_t *regFlags) const;
+ virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+ virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+ virtual void *getPtr(uint32_t index) const;
+ virtual void setMappable(uint8_t mappable);
+ virtual uint8_t getMappable() const;
+ virtual uint8_t checkIfAllBuffersMapped() const;
+
+ void setWindowInfo(preview_stream_ops_t *window, int width, int height,
+ int stride, int scanline, int format, int maxFPS, int usage = 0);
+ // Enqueue/display buffer[index] onto the native window,
+ // and dequeue one buffer from it.
+ // Returns the buffer index of the dequeued buffer.
+ int displayBuffer(uint32_t index);
+ void setMaxFPS(int maxFPS);
+ int32_t enqueueBuffer(uint32_t index, nsecs_t timeStamp = 0);
+ int32_t dequeueBuffer();
+ inline bool isBufOwnedByCamera(uint32_t index){return mLocalFlag[index] == BUFFER_OWNED;};
+
+private:
+ buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+ int mLocalFlag[MM_CAMERA_MAX_NUM_FRAMES];
+ struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+ preview_stream_ops_t *mWindow;
+ int mWidth, mHeight, mFormat, mStride, mScanline, mUsage, mMaxFPS;
+ camera_request_memory mGetMemory;
+ camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+ int mMinUndequeuedBuffers;
+ enum ColorSpace_t mColorSpace;
+ uint8_t mMappableBuffers;
+ pthread_mutex_t mLock;
+ uint8_t mEnqueuedBuffers;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HWI_MEM_H__ */
diff --git a/camera/QCamera2/HAL/QCameraMuxer.cpp b/camera/QCamera2/HAL/QCameraMuxer.cpp
new file mode 100644
index 0000000..e2ec989
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMuxer.cpp
@@ -0,0 +1,2823 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraMuxer"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
+#include STAT_H
+
+// Camera dependencies
+#include "QCameraMuxer.h"
+#include "QCamera2HWI.h"
+#include "QCamera3HWI.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+/* Muxer implementation */
+using namespace android;
+namespace qcamera {
+
+QCameraMuxer *gMuxer = NULL;
+
+//Error Check Macros
+#define CHECK_MUXER() \
+ if (!gMuxer) { \
+ LOGE("Error getting muxer "); \
+ return; \
+ } \
+
+#define CHECK_MUXER_ERROR() \
+ if (!gMuxer) { \
+ LOGE("Error getting muxer "); \
+ return -ENODEV; \
+ } \
+
+#define CHECK_CAMERA(pCam) \
+ if (!pCam) { \
+ LOGE("Error getting physical camera"); \
+ return; \
+ } \
+
+#define CHECK_CAMERA_ERROR(pCam) \
+ if (!pCam) { \
+ LOGE("Error getting physical camera"); \
+ return -ENODEV; \
+ } \
+
+#define CHECK_HWI(hwi) \
+ if (!hwi) { \
+ LOGE("Error !! HWI not found!!"); \
+ return; \
+ } \
+
+#define CHECK_HWI_ERROR(hwi) \
+ if (!hwi) { \
+ LOGE("Error !! HWI not found!!"); \
+ return -ENODEV; \
+ } \
+
+
+/*===========================================================================
+ * FUNCTION : getCameraMuxer
+ *
+ * DESCRIPTION : Creates Camera Muxer if not created
+ *
+ * PARAMETERS:
+ * @pMuxer : Pointer to retrieve Camera Muxer
+ * @num_of_cameras : Number of Physical Cameras on device
+ *
+ * RETURN : NONE
+ *==========================================================================*/
+void QCameraMuxer::getCameraMuxer(
+ QCameraMuxer** pMuxer, uint32_t num_of_cameras)
+{
+ *pMuxer = NULL;
+ if (!gMuxer) {
+ gMuxer = new QCameraMuxer(num_of_cameras);
+ }
+ CHECK_MUXER();
+ *pMuxer = gMuxer;
+ LOGH("gMuxer: %p ", gMuxer);
+ return;
+}
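+
+/*===========================================================================
+ * NOTE : illustrative usage sketch only, not part of the muxer code.
+ *
+ * A rough sketch of how a module-level caller could obtain the singleton;
+ * `numCams` and the calling context are assumptions:
+ *
+ *   QCameraMuxer *muxer = NULL;
+ *   QCameraMuxer::getCameraMuxer(&muxer, numCams);
+ *   if (muxer != NULL) {
+ *       int logicalCams = muxer->getNumberOfCameras();
+ *   }
+ *==========================================================================*/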
+
+/*===========================================================================
+ * FUNCTION : QCameraMuxer
+ *
+ * DESCRIPTION : QCameraMuxer Constructor
+ *
+ * PARAMETERS:
+ * @num_of_cameras : Number of Physical Cameras on device
+ *
+ *==========================================================================*/
+QCameraMuxer::QCameraMuxer(uint32_t num_of_cameras)
+ : mJpegClientHandle(0),
+ m_pPhyCamera(NULL),
+ m_pLogicalCamera(NULL),
+ m_pCallbacks(NULL),
+ m_bAuxCameraExposed(FALSE),
+ m_nPhyCameras(num_of_cameras),
+ m_nLogicalCameras(0),
+ m_MainJpegQ(releaseJpegInfo, this),
+ m_AuxJpegQ(releaseJpegInfo, this),
+ m_pRelCamMpoJpeg(NULL),
+ m_pMpoCallbackCookie(NULL),
+ m_pJpegCallbackCookie(NULL),
+ m_bDumpImages(FALSE),
+ m_bMpoEnabled(TRUE),
+ m_bFrameSyncEnabled(FALSE),
+ m_bRecordingHintInternallySet(FALSE)
+{
+ setupLogicalCameras();
+ memset(&mJpegOps, 0, sizeof(mJpegOps));
+ memset(&mJpegMpoOps, 0, sizeof(mJpegMpoOps));
+ memset(&mGetMemoryCb, 0, sizeof(mGetMemoryCb));
+ memset(&mDataCb, 0, sizeof(mDataCb));
+
+ // initialize mutex for MPO composition
+ pthread_mutex_init(&m_JpegLock, NULL);
+ // launch MPO composition thread
+ m_ComposeMpoTh.launch(composeMpoRoutine, this);
+
+ //Check whether dual camera images need to be dumped
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.dual.camera.dump", prop, "0");
+ m_bDumpImages = atoi(prop);
+ LOGH("dualCamera dump images:%d ", m_bDumpImages);
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraMuxer
+ *
+ * DESCRIPTION : QCameraMuxer Destructor
+ *
+ *==========================================================================*/
+QCameraMuxer::~QCameraMuxer() {
+ if (m_pLogicalCamera) {
+ delete [] m_pLogicalCamera;
+ m_pLogicalCamera = NULL;
+ }
+ if (m_pPhyCamera) {
+ delete [] m_pPhyCamera;
+ m_pPhyCamera = NULL;
+ }
+
+ if (NULL != m_pRelCamMpoJpeg) {
+ m_pRelCamMpoJpeg->release(m_pRelCamMpoJpeg);
+ m_pRelCamMpoJpeg = NULL;
+ }
+ // flush Jpeg Queues
+ m_MainJpegQ.flush();
+ m_AuxJpegQ.flush();
+
+ // stop and exit MPO composition thread
+ m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, FALSE);
+ m_ComposeMpoTh.exit();
+
+ pthread_mutex_destroy(&m_JpegLock);
+}
+
+/*===========================================================================
+ * FUNCTION : get_number_of_cameras
+ *
+ * DESCRIPTION : Provide number of Logical Cameras
+ *
+ * RETURN : Number of logical Cameras
+ *==========================================================================*/
+int QCameraMuxer::get_number_of_cameras()
+{
+ return gMuxer->getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION : get_camera_info
+ *
+ * DESCRIPTION : get logical camera info
+ *
+ * PARAMETERS:
+ * @camera_id : Logical Camera ID
+ * @info : Logical Main Camera Info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * ENODEV : Camera not found
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rc = NO_ERROR;
+ LOGH("E");
+ cam_sync_type_t type;
+ if ((camera_id < 0) || (camera_id >= gMuxer->getNumberOfCameras())) {
+ LOGE("Camera id %d not found!", camera_id);
+ return -ENODEV;
+ }
+ if(info) {
+ rc = gMuxer->getCameraInfo(camera_id, info, &type);
+ }
+ LOGH("X, rc: %d", rc);
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : set_callbacks
+ *
+ * DESCRIPTION : Not Implemented
+ *
+ * PARAMETERS:
+ * @callbacks : Camera Module Callbacks
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_callbacks(__unused const camera_module_callbacks_t *callbacks)
+{
+ // Not implemented
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ * @module: hw module
+ * @id : camera ID
+ * @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * BAD_VALUE : Invalid Camera ID
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::camera_device_open(
+ __unused const struct hw_module_t *module, const char *id,
+ struct hw_device_t **hw_device)
+{
+ int rc = NO_ERROR;
+ if (!id) {
+ LOGE("Invalid camera id");
+ return BAD_VALUE;
+ }
+ LOGH("id= %d", atoi(id));
+
+ rc = gMuxer->cameraDeviceOpen(atoi(id), hw_device);
+ LOGH("id= %d, rc: %d", atoi(id), rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : open_legacy
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ * @module: hw module
+ * @id : camera ID
+ * @halVersion: hal version
+ * @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * BAD_VALUE : Invalid Camera ID
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::open_legacy(__unused const struct hw_module_t* module,
+ const char* id, __unused uint32_t halVersion, struct hw_device_t** hw_device)
+{
+ int rc = NO_ERROR;
+ if (!id) {
+ LOGE("Invalid camera id");
+ return BAD_VALUE;
+ }
+ LOGH("id= %d", atoi(id));
+
+ rc = gMuxer->cameraDeviceOpen(atoi(id), hw_device);
+ LOGH("id= %d, rc: %d", atoi(id), rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : set_preview_window
+ *
+ * DESCRIPTION: Set Preview window for main camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @window: Preview window ops
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rc = NO_ERROR;
+ CHECK_MUXER_ERROR();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ // Set preview window only for primary camera
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+ rc = hwi->set_preview_window(pCam->dev, window);
+ if (rc != NO_ERROR) {
+ LOGE("Error!! setting preview window");
+ return rc;
+ }
+ break;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : set_callBacks
+ *
+ * DESCRIPTION: Set framework callbacks used to deliver notifications and frame data asynchronously
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @notify_cb: Notification callback
+ * @data_cb: data callback
+ * @data_cb_timestamp: data timestamp callback
+ * @get_memory: callback to obtain memory
+ * @user : userdata
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::set_callBacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ // Set callbacks to HWI
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ hwi->set_CallBacks(pCam->dev, notify_cb, data_cb, data_cb_timestamp,
+ get_memory, user);
+
+ // Set JPEG callbacks.
+ // The physical camera descriptor is passed as the Jpeg callback cookie;
+ // it is retrieved inside the callbacks to identify the camera instance
+ // delivering the JPEG.
+ hwi->setJpegCallBacks(jpeg_data_callback, (void*)pCam);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ rc = gMuxer->setMainJpegCallbackCookie((void*)(pCam));
+ if(rc != NO_ERROR) {
+ LOGW("Error setting Jpeg callback cookie");
+ }
+ }
+ }
+ // Store callback in Muxer to send data callbacks
+ rc = gMuxer->setDataCallback(data_cb);
+ if(rc != NO_ERROR) {
+ LOGW("Error setting data callback");
+ }
+ // memory callback stored to allocate memory for MPO buffer
+ rc = gMuxer->setMemoryCallback(get_memory);
+ if(rc != NO_ERROR) {
+ LOGW("Error setting memory callback");
+ }
+ // actual user callback cookie is saved in Muxer
+ // this will be used to deliver final MPO callback to the framework
+ rc = gMuxer->setMpoCallbackCookie(user);
+ if(rc != NO_ERROR) {
+ LOGW("Error setting mpo cookie");
+ }
+
+ LOGH("X");
+
+}
+
+/*===========================================================================
+ * FUNCTION : enable_msg_type
+ *
+ * DESCRIPTION: Enable msg_type to send callbacks
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @msg_type: callback Message type to be enabled
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+ hwi->enable_msg_type(pCam->dev, msg_type);
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : disable_msg_type
+ *
+ * DESCRIPTION: disable msg_type to send callbacks
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @msg_type: callback Message type to be disabled
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+ hwi->disable_msg_type(pCam->dev, msg_type);
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : msg_type_enabled
+ *
+ * DESCRIPTION: Check if message type enabled
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @msg_type: message type
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+int QCameraMuxer::msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ return hwi->msg_type_enabled(pCam->dev, msg_type);
+ }
+ }
+ LOGH("X");
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : start_preview
+ *
+ * DESCRIPTION: Starts logical camera preview
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::start_preview(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ // prepare preview first for all cameras
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->prepare_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error preparing preview !! ");
+ return rc;
+ }
+ }
+
+ if (cam->numCameras > 1) {
+ uint sessionId = 0;
+ // Set up sync for camera sessions
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if(pCam->mode == CAM_MODE_PRIMARY) {
+ // bundle primary cam with all aux cameras
+ for (uint32_t j = 0; j < cam->numCameras; j++) {
+ if (j == cam->nPrimaryPhyCamIndex) {
+ continue;
+ }
+ sessionId = cam->sId[j];
+ LOGH("Related cam id: %d, server id: %d sync ON"
+ " related session_id %d",
+ cam->pId[i], cam->sId[i], sessionId);
+ rc = hwi->bundleRelatedCameras(true, sessionId);
+ if (rc != NO_ERROR) {
+ LOGE("Error Bundling physical cameras !! ");
+ return rc;
+ }
+ }
+ }
+
+ if (pCam->mode == CAM_MODE_SECONDARY) {
+ // bundle all aux cam with primary cams
+ sessionId = cam->sId[cam->nPrimaryPhyCamIndex];
+ LOGH("Related cam id: %d, server id: %d sync ON"
+ " related session_id %d",
+ cam->pId[i], cam->sId[i], sessionId);
+ rc = hwi->bundleRelatedCameras(true, sessionId);
+ if (rc != NO_ERROR) {
+ LOGE("Error Bundling physical cameras !! ");
+ return rc;
+ }
+ }
+ }
+
+ // Remember Sync is ON
+ cam->bSyncOn = true;
+ }
+ // Start Preview for all cameras
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+ rc = hwi->start_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error starting preview !! ");
+ return rc;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stop_preview
+ *
+ * DESCRIPTION: Stops logical camera preview
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::stop_preview(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ QCamera2HardwareInterface::stop_preview(pCam->dev);
+ }
+
+ //Flush JPEG queues. Nodes in the main and aux JPEG queues are not valid after preview is stopped.
+ gMuxer->m_MainJpegQ.flush();
+ gMuxer->m_AuxJpegQ.flush();
+ LOGH(" X");
+}
+
+/*===========================================================================
+ * FUNCTION : preview_enabled
+ *
+ * DESCRIPTION: Checks whether preview is enabled
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+int QCameraMuxer::preview_enabled(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ return hwi->preview_enabled(pCam->dev);
+ }
+ }
+ LOGH("X");
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: Stores metadata in buffers
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @enable: Enable/disable metadata
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->store_meta_data_in_buffers(pCam->dev, enable);
+ if (rc != NO_ERROR) {
+ LOGE("Error storing metat data !! ");
+ return rc;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : start_recording
+ *
+ * DESCRIPTION: Starts recording on camcorder
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::start_recording(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ bool previewRestartNeeded = false;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ // In cases where recording hint is not set, hwi->start_recording will
+ // internally restart the preview.
+ // To keep preview restart under the muxer's control,
+ // 1. call pre_start_recording first
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->pre_start_recording(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error preparing recording start!! ");
+ return rc;
+ }
+ }
+
+ // 2. Check if preview restart is needed. Check all cameras.
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (hwi->isPreviewRestartNeeded()) {
+ previewRestartNeeded = hwi->isPreviewRestartNeeded();
+ break;
+ }
+ }
+
+ if (previewRestartNeeded) {
+ // 3. if preview restart needed. stop the preview first
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->restart_stop_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart stop preview!! ");
+ return rc;
+ }
+ }
+
+ //4. Update the recording hint value to TRUE
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->setRecordingHintValue(TRUE);
+ if (rc != NO_ERROR) {
+ LOGE("Error in setting recording hint value!! ");
+ return rc;
+ }
+ gMuxer->m_bRecordingHintInternallySet = TRUE;
+ }
+
+ // 5. start the preview
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->restart_start_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart start preview!! ");
+ return rc;
+ }
+ }
+ }
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ rc = hwi->start_recording(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error starting recording!! ");
+ }
+ break;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stop_recording
+ *
+ * DESCRIPTION: Stops recording on camcorder
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::stop_recording(struct camera_device * device)
+{
+
+ int rc = NO_ERROR;
+ LOGH("E");
+
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ QCamera2HardwareInterface::stop_recording(pCam->dev);
+ break;
+ }
+ }
+
+ // If the recording hint was internally set to TRUE,
+ // it needs to be reset to FALSE here.
+ // A preview restart is required around this change.
+ if (gMuxer->m_bRecordingHintInternallySet) {
+ // stop the preview first
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ rc = hwi->restart_stop_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart stop preview!! ");
+ return;
+ }
+ }
+
+ // Update the recording hint value to FALSE
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ rc = hwi->setRecordingHintValue(FALSE);
+ if (rc != NO_ERROR) {
+ LOGE("Error in setting recording hint value!! ");
+ return;
+ }
+ gMuxer->m_bRecordingHintInternallySet = FALSE;
+ }
+
+ // start the preview
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ rc = hwi->restart_start_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart start preview!! ");
+ return;
+ }
+ }
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : recording_enabled
+ *
+ * DESCRIPTION: Checks whether recording is enabled
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+int QCameraMuxer::recording_enabled(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ return hwi->recording_enabled(pCam->dev);
+ }
+ }
+ LOGH("X");
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : release_recording_frame
+ *
+ * DESCRIPTION: Release the recording frame
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @opaque: Frame to be released
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ QCamera2HardwareInterface::release_recording_frame(pCam->dev, opaque);
+ break;
+ }
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : auto_focus
+ *
+ * DESCRIPTION: Performs auto focus on camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::auto_focus(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+ // Call auto focus on main camera
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ rc = QCamera2HardwareInterface::auto_focus(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error auto focusing !! ");
+ return rc;
+ }
+ break;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : cancel_auto_focus
+ *
+ * DESCRIPTION: Cancels auto focus
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cancel_auto_focus(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+ // Cancel auto focus on primary camera
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ rc = QCamera2HardwareInterface::cancel_auto_focus(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error cancelling auto focus !! ");
+ return rc;
+ }
+ break;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : take_picture
+ *
+ * DESCRIPTION: Take snapshots on device
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::take_picture(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ bool previewRestartNeeded = false;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.dual.camera.mpo", prop, "1");
+ gMuxer->m_bMpoEnabled = atoi(prop);
+ // If only one physical camera is included in the logical camera, disable MPO
+ int numOfActivePhyCam = 0;
+ gMuxer->getActiveNumOfPhyCam(cam, numOfActivePhyCam);
+ if (gMuxer->m_bMpoEnabled && numOfActivePhyCam <= 1) {
+ gMuxer->m_bMpoEnabled = 0;
+ }
+ LOGH("dualCamera MPO Enabled:%d ", gMuxer->m_bMpoEnabled);
+
+ if (!gMuxer->mJpegClientHandle) {
+ // set up jpeg handles
+ pCam = gMuxer->getPhysicalCamera(cam, 0);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->getJpegHandleInfo(&gMuxer->mJpegOps, &gMuxer->mJpegMpoOps,
+ &gMuxer->mJpegClientHandle);
+ if (rc != NO_ERROR) {
+ LOGE("Error retrieving jpeg handle!");
+ return rc;
+ }
+
+ for (uint32_t i = 1; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->setJpegHandleInfo(&gMuxer->mJpegOps, &gMuxer->mJpegMpoOps,
+ gMuxer->mJpegClientHandle);
+ if (rc != NO_ERROR) {
+ LOGE("Error setting jpeg handle %d!", i);
+ return rc;
+ }
+ }
+ }
+
+ // prepare snapshot for main camera
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ rc = hwi->prepare_snapshot(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error preparing for snapshot !! ");
+ return rc;
+ }
+ }
+ // set Mpo composition for each session
+ rc = hwi->setMpoComposition(gMuxer->m_bMpoEnabled);
+ //disable MPO if AOST features are enabled
+ if (rc != NO_ERROR) {
+ gMuxer->m_bMpoEnabled = 0;
+ rc = NO_ERROR;
+ }
+ }
+
+ // initialize Jpeg Queues
+ gMuxer->m_MainJpegQ.init();
+ gMuxer->m_AuxJpegQ.init();
+ gMuxer->m_ComposeMpoTh.sendCmd(
+ CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+
+ // In cases where the recording hint is set and preview is running,
+ // hwi->take_picture will internally restart the preview.
+ // To keep preview restart under the muxer's control,
+ // 1. call pre_take_picture first
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ // no need to call pre_take_picture on Aux if MPO is disabled (for AOST, liveshot, etc.)
+ if ( (gMuxer->m_bMpoEnabled == 1) || (pCam->mode == CAM_MODE_PRIMARY) ) {
+ rc = hwi->pre_take_picture(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error preparing take_picture!! ");
+ return rc;
+ }
+ }
+ }
+
+ // 2. Check if preview restart is needed. Check all cameras.
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if (hwi->isPreviewRestartNeeded()) {
+ previewRestartNeeded = hwi->isPreviewRestartNeeded();
+ break;
+ }
+ }
+
+ if (previewRestartNeeded) {
+ // 3. if preview restart needed. stop the preview first
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->restart_stop_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart stop preview!! ");
+ return rc;
+ }
+ }
+
+ //4. Update the recording hint value to FALSE
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->setRecordingHintValue(FALSE);
+ if (rc != NO_ERROR) {
+ LOGE("Error in setting recording hint value!! ");
+ return rc;
+ }
+ }
+
+ // 5. start the preview
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = hwi->restart_start_preview(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error in restart start preview!! ");
+ return rc;
+ }
+ }
+ }
+
+ // Since frame sync for dual cameras is enabled, the take_picture call
+ // for a secondary camera is handled only at the HAL level, to initialize
+ // the corresponding pproc channel and update its state machine.
+ // The call is forwarded to mm-camera-intf only for the primary camera,
+ // which must receive take_picture after all secondary camera state
+ // machines have been updated.
+ for (int32_t i = cam->numCameras-1 ; i >= 0; i--) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ // no need to call take_picture on Aux if MPO is disabled (AOST)
+ if ( (gMuxer->m_bMpoEnabled == 1) || (pCam->mode == CAM_MODE_PRIMARY) ) {
+ rc = QCamera2HardwareInterface::take_picture(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error taking picture !! ");
+ return rc;
+ }
+ }
+ }
+ LOGH("X");
+ return rc;
+}
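+
+// Note: the reverse loop above encodes an ordering requirement -- every
+// secondary session must see take_picture (to set up its pproc channel and
+// state machine) before the primary session, which is the only one forwarded
+// to mm-camera-intf. A minimal standalone sketch of that ordering, using
+// illustrative names (Session, issueTakePicture, captureAll) that are not
+// HAL symbols, assuming the primary session sits at index 0:
+//
+//     #include <vector>
+//
+//     struct Session { bool primary; };
+//
+//     // stand-in for forwarding take_picture to one HAL instance
+//     static void issueTakePicture(const Session &) { }
+//
+//     static void captureAll(const std::vector<Session> &sessions) {
+//         // primary is at index 0, so walking backwards triggers it last
+//         for (auto it = sessions.rbegin(); it != sessions.rend(); ++it)
+//             issueTakePicture(*it);
+//     }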
+
+/*===========================================================================
+ * FUNCTION : cancel_picture
+ *
+ * DESCRIPTION: Cancel the take picture call
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cancel_picture(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = QCamera2HardwareInterface::cancel_picture(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error cancelling picture !! ");
+ return rc;
+ }
+ }
+ gMuxer->m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, FALSE);
+ // flush Jpeg Queues
+ gMuxer->m_MainJpegQ.flush();
+ gMuxer->m_AuxJpegQ.flush();
+
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : set_parameters
+ *
+ * DESCRIPTION: Sets the parameters on camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @parms : Parameters to be set on camera
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_parameters(struct camera_device * device,
+ const char *parms)
+
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ bool needRestart = false;
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = QCamera2HardwareInterface::set_parameters(pCam->dev, parms);
+ if (rc != NO_ERROR) {
+ LOGE("Error setting parameters !! ");
+ return rc;
+ }
+
+ needRestart |= hwi->getNeedRestart();
+ }
+
+ if (needRestart) {
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ LOGD("stopping preview for cam %d", i);
+ rc = QCamera2HardwareInterface::stop_after_set_params(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error stopping camera rc=%d!! ", rc);
+ return rc;
+ }
+ }
+ }
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ LOGD("commiting parameters for cam %d", i);
+ rc = QCamera2HardwareInterface::commit_params(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error committing parameters rc=%d!! ", rc);
+ return rc;
+ }
+ }
+
+ if (needRestart) {
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ LOGD("restarting preview for cam %d", i);
+ rc = QCamera2HardwareInterface::restart_after_set_params(pCam->dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error restarting camera rc=%d!! ", rc);
+ return rc;
+ }
+ }
+ }
+
+ LOGH(" X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : get_parameters
+ *
+ * DESCRIPTION: Gets the parameters on camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : Parameter string or NULL
+ *==========================================================================*/
+char* QCameraMuxer::get_parameters(struct camera_device * device)
+{
+ LOGH("E");
+
+ if (!gMuxer)
+ return NULL;
+
+ char* ret = NULL;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ if (!cam) {
+ LOGE("Error getting logical camera");
+ return NULL;
+ }
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ if (!pCam) {
+ LOGE("Error getting physical camera");
+ return NULL;
+ }
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ if (!hwi) {
+ LOGE("Allocation of hardware interface failed");
+ return NULL;
+ }
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ // Get only primary camera parameters
+ ret = QCamera2HardwareInterface::get_parameters(pCam->dev);
+ break;
+ }
+ }
+
+ LOGH("X");
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : put_parameters
+ *
+ * DESCRIPTION: Puts parameters on camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @parm : parameters
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::put_parameters(struct camera_device * device, char *parm)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ if (pCam->mode == CAM_MODE_PRIMARY) {
+ // The parameter string is not retained by HWI, so it is freed here
+ QCamera2HardwareInterface::put_parameters(pCam->dev, parm);
+ break;
+ }
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : send_command
+ *
+ * DESCRIPTION: Send command to camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @cmd : Command
+ * @arg1/arg2 : command arguments
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = QCamera2HardwareInterface::send_command(pCam->dev, cmd, arg1, arg2);
+ if (rc != NO_ERROR) {
+ LOGE("Error sending command !! ");
+ return rc;
+ }
+ }
+
+ switch (cmd) {
+#ifndef VANILLA_HAL
+ case CAMERA_CMD_LONGSHOT_ON:
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = QCamera2HardwareInterface::send_command_restart(pCam->dev,
+ cmd, arg1, arg2);
+ if (rc != NO_ERROR) {
+ LOGE("Error sending command restart !! ");
+ return rc;
+ }
+ }
+ break;
+ case CAMERA_CMD_LONGSHOT_OFF:
+ gMuxer->m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
+ FALSE, FALSE);
+ // flush Jpeg Queues
+ gMuxer->m_MainJpegQ.flush();
+ gMuxer->m_AuxJpegQ.flush();
+ break;
+#endif
+ default:
+ // do nothing
+ rc = NO_ERROR;
+ break;
+ }
+
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : release
+ *
+ * DESCRIPTION: Release the camera
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::release(struct camera_device * device)
+{
+ LOGH("E");
+ CHECK_MUXER();
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI(hwi);
+
+ QCamera2HardwareInterface::release(pCam->dev);
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : dump
+ *
+ * DESCRIPTION: Dump the camera info
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ * @fd : fd
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::dump(struct camera_device * device, int fd)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+ CHECK_CAMERA_ERROR(cam);
+
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ rc = QCamera2HardwareInterface::dump(pCam->dev, fd);
+ if (rc != NO_ERROR) {
+ LOGE("Error dumping");
+ return rc;
+ }
+ }
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : close_camera_device
+ *
+ * DESCRIPTION: Close the camera
+ *
+ * PARAMETERS :
+ * @hw_dev : camera hardware device info
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::close_camera_device(hw_device_t *hw_dev)
+{
+ LOGH("E");
+ CHECK_MUXER_ERROR();
+ int rc = NO_ERROR;
+ qcamera_physical_descriptor_t *pCam = NULL;
+ camera_device_t *cam_dev = (camera_device_t*)hw_dev;
+ qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(cam_dev);
+ CHECK_CAMERA_ERROR(cam);
+
+ // Unlink camera sessions
+ if (cam->bSyncOn) {
+ if (cam->numCameras > 1) {
+ uint sessionId = 0;
+ // unbundle primary camera with all aux cameras
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ if(pCam->mode == CAM_MODE_PRIMARY) {
+ // unbundle primary cam from all aux cameras
+ for (uint32_t j = 0; j < cam->numCameras; j++) {
+ if (j == cam->nPrimaryPhyCamIndex) {
+ continue;
+ }
+ sessionId = cam->sId[j];
+ LOGH("Related cam id: %d, server id: %d sync OFF"
+ " related session_id %d",
+ cam->pId[i], cam->sId[i], sessionId);
+ rc = hwi->bundleRelatedCameras(false, sessionId);
+ if (rc != NO_ERROR) {
+ LOGE("Error Bundling physical cameras !! ");
+ break;
+ }
+ }
+ }
+
+ if (pCam->mode == CAM_MODE_SECONDARY) {
+ // unbundle this aux cam from the primary cam
+ sessionId = cam->sId[cam->nPrimaryPhyCamIndex];
+ LOGH("Related cam id: %d, server id: %d sync OFF"
+ " related session_id %d",
+ cam->pId[i], cam->sId[i], sessionId);
+ rc = hwi->bundleRelatedCameras(false, sessionId);
+ if (rc != NO_ERROR) {
+ LOGE("Error Bundling physical cameras !! ");
+ break;
+ }
+ }
+ }
+ }
+ cam->bSyncOn = false;
+ }
+
+ // Attempt to close all cameras regardless of unbundle results
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ pCam = gMuxer->getPhysicalCamera(cam, i);
+ CHECK_CAMERA_ERROR(pCam);
+
+ hw_device_t *dev = (hw_device_t*)(pCam->dev);
+ LOGH("hw device %x, hw %x", dev, pCam->hwi);
+
+ rc = QCamera2HardwareInterface::close_camera_device(dev);
+ if (rc != NO_ERROR) {
+ LOGE("Error closing camera");
+ }
+ pCam->hwi = NULL;
+ pCam->dev = NULL;
+ }
+
+ // Reset JPEG client handle
+ gMuxer->setJpegHandle(0);
+ LOGH("X, rc: %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setupLogicalCameras
+ *
+ * DESCRIPTION : Enumerates physical cameras and groups them into logical cameras
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::setupLogicalCameras()
+{
+ int rc = NO_ERROR;
+ char prop[PROPERTY_VALUE_MAX];
+ int i = 0;
+ int primaryType = CAM_TYPE_MAIN;
+
+ LOGH("[%d] E: rc = %d", rc);
+ // Signifies whether AUX camera has to be exposed as physical camera
+ property_get("persist.camera.aux.camera", prop, "0");
+ m_bAuxCameraExposed = atoi(prop);
+
+ // Signifies whether AUX camera needs to be swapped
+ property_get("persist.camera.auxcamera.swap", prop, "0");
+ int swapAux = atoi(prop);
+ if (swapAux != 0) {
+ primaryType = CAM_TYPE_AUX;
+ }
+
+ // Check for number of camera present on device
+ if (!m_nPhyCameras || (m_nPhyCameras > MM_CAMERA_MAX_NUM_SENSORS)) {
+ LOGE("Error!! Invalid number of cameras: %d",
+ m_nPhyCameras);
+ return BAD_VALUE;
+ }
+
+ m_pPhyCamera = new qcamera_physical_descriptor_t[m_nPhyCameras];
+ if (!m_pPhyCamera) {
+ LOGE("Error allocating camera info buffer!!");
+ return NO_MEMORY;
+ }
+ memset(m_pPhyCamera, 0x00,
+ (m_nPhyCameras * sizeof(qcamera_physical_descriptor_t)));
+ uint32_t cameraId = 0;
+ m_nLogicalCameras = 0;
+
+ // Enumerate physical cameras and count logical (main) cameras
+ for (i = 0; i < m_nPhyCameras ; i++, cameraId++) {
+ camera_info *info = &m_pPhyCamera[i].cam_info;
+ rc = QCamera2HardwareInterface::getCapabilities(cameraId,
+ info, &m_pPhyCamera[i].type);
+ m_pPhyCamera[i].id = cameraId;
+ m_pPhyCamera[i].device_version = CAMERA_DEVICE_API_VERSION_1_0;
+ m_pPhyCamera[i].mode = CAM_MODE_PRIMARY;
+
+ if (!m_bAuxCameraExposed && (m_pPhyCamera[i].type != primaryType)) {
+ m_pPhyCamera[i].mode = CAM_MODE_SECONDARY;
+ LOGH("Camera ID: %d, Aux Camera, type: %d, facing: %d",
+ cameraId, m_pPhyCamera[i].type,
+ m_pPhyCamera[i].cam_info.facing);
+ }
+ else {
+ m_nLogicalCameras++;
+ LOGH("Camera ID: %d, Main Camera, type: %d, facing: %d",
+ cameraId, m_pPhyCamera[i].type,
+ m_pPhyCamera[i].cam_info.facing);
+ }
+ }
+
+ if (!m_nLogicalCameras) {
+ // No Main camera detected, return from here
+ LOGE("Error !!!! detecting main camera!!");
+ delete [] m_pPhyCamera;
+ m_pPhyCamera = NULL;
+ return -ENODEV;
+ }
+ // Allocate Logical Camera descriptors
+ m_pLogicalCamera = new qcamera_logical_descriptor_t[m_nLogicalCameras];
+ if (!m_pLogicalCamera) {
+ LOGE("Error !!!! allocating camera info buffer!!");
+ delete [] m_pPhyCamera;
+ m_pPhyCamera = NULL;
+ return NO_MEMORY;
+ }
+ memset(m_pLogicalCamera, 0x00,
+ (m_nLogicalCameras * sizeof(qcamera_logical_descriptor_t)));
+ // Assign MAIN cameras for each logical camera
+ int index = 0;
+ for (i = 0; i < m_nPhyCameras ; i++) {
+ if (m_pPhyCamera[i].mode == CAM_MODE_PRIMARY) {
+ m_pLogicalCamera[index].nPrimaryPhyCamIndex = 0;
+ m_pLogicalCamera[index].id = index;
+ m_pLogicalCamera[index].device_version = CAMERA_DEVICE_API_VERSION_1_0;
+ m_pLogicalCamera[index].pId[0] = i;
+ m_pLogicalCamera[index].type[0] = CAM_TYPE_MAIN;
+ m_pLogicalCamera[index].mode[0] = CAM_MODE_PRIMARY;
+ m_pLogicalCamera[index].facing = m_pPhyCamera[i].cam_info.facing;
+ m_pLogicalCamera[index].numCameras++;
+ LOGH("Logical Main Camera ID: %d, facing: %d,"
+ "Phy Id: %d type: %d mode: %d",
+ m_pLogicalCamera[index].id,
+ m_pLogicalCamera[index].facing,
+ m_pLogicalCamera[index].pId[0],
+ m_pLogicalCamera[index].type[0],
+ m_pLogicalCamera[index].mode[0]);
+
+ index++;
+ }
+ }
+ //Now assign AUX cameras to logical camera
+ for (i = 0; i < m_nPhyCameras ; i++) {
+ if (m_pPhyCamera[i].mode == CAM_MODE_SECONDARY) {
+ for (int j = 0; j < m_nLogicalCameras; j++) {
+ int n = m_pLogicalCamera[j].numCameras;
+ ///@note n can only be 1 at this point
+ if ((n < MAX_NUM_CAMERA_PER_BUNDLE) &&
+ (m_pLogicalCamera[j].facing ==
+ m_pPhyCamera[i].cam_info.facing)) {
+ m_pLogicalCamera[j].pId[n] = i;
+ m_pLogicalCamera[j].type[n] = CAM_TYPE_AUX;
+ m_pLogicalCamera[j].mode[n] = CAM_MODE_SECONDARY;
+ m_pLogicalCamera[j].numCameras++;
+ LOGH("Aux %d for Logical Camera ID: %d,"
+ "aux phy id:%d, type: %d mode: %d",
+ n, j, m_pLogicalCamera[j].pId[n],
+ m_pLogicalCamera[j].type[n], m_pLogicalCamera[j].mode[n]);
+ }
+ }
+ }
+ }
+ //Print logical and physical camera tables
+ for (i = 0; i < m_nLogicalCameras ; i++) {
+ for (uint8_t j = 0; j < m_pLogicalCamera[i].numCameras; j++) {
+ LOGH("Logical Camera ID: %d, index: %d, "
+ "facing: %d, Phy Id: %d type: %d mode: %d",
+ i, j, m_pLogicalCamera[i].facing,
+ m_pLogicalCamera[i].pId[j], m_pLogicalCamera[i].type[j],
+ m_pLogicalCamera[i].mode[j]);
+ }
+ }
+ LOGH("[%d] X: rc = %d", rc);
+ return rc;
+}
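+
+// The enumeration above builds one logical camera per PRIMARY sensor and then
+// folds every SECONDARY sensor into the logical entry that shares its facing
+// (bounded by MAX_NUM_CAMERA_PER_BUNDLE). A simplified standalone sketch of
+// that grouping, using illustrative stand-ins (PhyCam, LogicalCam, group)
+// rather than the HAL descriptors:
+//
+//     #include <vector>
+//
+//     struct PhyCam     { int facing; bool primary; };
+//     struct LogicalCam { int facing; std::vector<int> phyIds; };
+//
+//     static std::vector<LogicalCam> group(const std::vector<PhyCam> &phy) {
+//         std::vector<LogicalCam> logical;
+//         for (size_t i = 0; i < phy.size(); i++)     // main cameras first
+//             if (phy[i].primary)
+//                 logical.push_back({phy[i].facing, {(int)i}});
+//         for (size_t i = 0; i < phy.size(); i++)     // then attach aux cameras
+//             if (!phy[i].primary)
+//                 for (auto &l : logical)
+//                     if (l.facing == phy[i].facing)
+//                         l.phyIds.push_back((int)i);
+//         return logical;
+//     }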
+
+/*===========================================================================
+ * FUNCTION : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of logical cameras detected
+ *
+ * RETURN : number of cameras detected
+ *==========================================================================*/
+int QCameraMuxer::getNumberOfCameras()
+{
+ return m_nLogicalCameras;
+}
+
+/*===========================================================================
+ * FUNCTION : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ * @camera_id : camera ID
+ * @info : ptr to camera info struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::getCameraInfo(int camera_id,
+ struct camera_info *info, __unused cam_sync_type_t *p_cam_type)
+{
+ int rc = NO_ERROR;
+ LOGH("E, camera_id = %d", camera_id);
+ cam_sync_type_t cam_type = CAM_TYPE_MAIN;
+
+ if (!m_nLogicalCameras || (camera_id >= m_nLogicalCameras) ||
+ !info || (camera_id < 0)) {
+ LOGE("m_nLogicalCameras: %d, camera id: %d",
+ m_nLogicalCameras, camera_id);
+ return -ENODEV;
+ }
+
+ if (!m_pLogicalCamera || !m_pPhyCamera) {
+ LOGE("Error! Cameras not initialized!");
+ return NO_INIT;
+ }
+ uint32_t phy_id =
+ m_pLogicalCamera[camera_id].pId[
+ m_pLogicalCamera[camera_id].nPrimaryPhyCamIndex];
+ // Call HAL3 getCamInfo to get the flash light info through static metadata
+ // regardless of HAL version
+ rc = QCamera3HardwareInterface::getCamInfo(phy_id, info);
+ info->device_version = CAMERA_DEVICE_API_VERSION_1_0; // Hardcode the HAL to HAL1
+ LOGH("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setCallbacks
+ *
+ * DESCRIPTION: set callback functions to send asynchronous notifications to
+ * frameworks.
+ *
+ * PARAMETERS :
+ * @callbacks : callback function pointer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setCallbacks(const camera_module_callbacks_t *callbacks)
+{
+ if(callbacks) {
+ m_pCallbacks = callbacks;
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setDataCallback
+ *
+ * DESCRIPTION: set data callback function for snapshots
+ *
+ * PARAMETERS :
+ * @data_cb : callback function pointer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setDataCallback(camera_data_callback data_cb)
+{
+ if(data_cb) {
+ mDataCb = data_cb;
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setMemoryCallback
+ *
+ * DESCRIPTION: set get memory callback for memory allocations
+ *
+ * PARAMETERS :
+ * @get_memory : callback function pointer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMemoryCallback(camera_request_memory get_memory)
+{
+ if(get_memory) {
+ mGetMemoryCb = get_memory;
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setMpoCallbackCookie
+ *
+ * DESCRIPTION: set the MPO callback cookie, used for sending final MPO
+ * callbacks to the framework
+ *
+ * PARAMETERS :
+ * @mpoCbCookie : ptr to the MPO callback cookie
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMpoCallbackCookie(void* mpoCbCookie)
+{
+ if(mpoCbCookie) {
+ m_pMpoCallbackCookie = mpoCbCookie;
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMpoCallbackCookie
+ *
+ * DESCRIPTION: gets the MPO callback cookie, used for sending final MPO
+ * callbacks to the framework
+ *
+ * PARAMETERS :none
+ *
+ * RETURN :void ptr to the mpo callback cookie
+ *==========================================================================*/
+void* QCameraMuxer::getMpoCallbackCookie(void)
+{
+ return m_pMpoCallbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION : setMainJpegCallbackCookie
+ *
+ * DESCRIPTION: set the jpeg callback cookie, i.e. the phy cam instance
+ * of the primary related cam instance
+ *
+ * PARAMETERS :
+ * @jpegCbCookie : ptr to jpeg cookie
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMainJpegCallbackCookie(void* jpegCbCookie)
+{
+ if(jpegCbCookie) {
+ m_pJpegCallbackCookie = jpegCbCookie;
+ return NO_ERROR;
+ } else {
+ return BAD_TYPE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMainJpegCallbackCookie
+ *
+ * DESCRIPTION: gets the jpeg callback cookie, i.e. the phy cam instance
+ * of the primary related cam instance
+ *
+ * PARAMETERS :none
+ *
+ * RETURN :void ptr to the jpeg callback cookie
+ *==========================================================================*/
+void* QCameraMuxer::getMainJpegCallbackCookie(void)
+{
+ return m_pJpegCallbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ * @camera_id : camera ID
+ * @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cameraDeviceOpen(int camera_id,
+ struct hw_device_t **hw_device)
+{
+ int rc = NO_ERROR;
+ uint32_t phyId = 0;
+ qcamera_logical_descriptor_t *cam = NULL;
+
+ if (camera_id < 0 || camera_id >= m_nLogicalCameras) {
+ LOGE("Camera id %d not found!", camera_id);
+ return -ENODEV;
+ }
+
+ if ( NULL == m_pLogicalCamera) {
+ LOGE("Hal descriptor table is not initialized!");
+ return NO_INIT;
+ }
+
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.dc.frame.sync", prop, "1");
+ m_bFrameSyncEnabled = atoi(prop);
+
+ // Get logical camera
+ cam = &m_pLogicalCamera[camera_id];
+
+ if (m_pLogicalCamera[camera_id].device_version ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ // HW Dev Holders
+ hw_device_t *hw_dev[cam->numCameras];
+
+ if (m_pPhyCamera[cam->pId[0]].type != CAM_TYPE_MAIN) {
+ LOGE("Physical camera at index 0 is not main!");
+ return UNKNOWN_ERROR;
+ }
+
+ // Open all physical cameras
+ for (uint32_t i = 0; i < cam->numCameras; i++) {
+ phyId = cam->pId[i];
+ QCamera2HardwareInterface *hw =
+ new QCamera2HardwareInterface((uint32_t)phyId);
+ if (!hw) {
+ LOGE("Allocation of hardware interface failed");
+ return NO_MEMORY;
+ }
+ hw_dev[i] = NULL;
+
+ // Make Camera HWI aware of its mode
+ cam_sync_related_sensors_event_info_t info;
+ info.sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+ info.mode = m_pPhyCamera[phyId].mode;
+ info.type = m_pPhyCamera[phyId].type;
+ rc = hw->setRelatedCamSyncInfo(&info);
+ hw->setFrameSyncEnabled(m_bFrameSyncEnabled);
+ if (rc != NO_ERROR) {
+ LOGE("setRelatedCamSyncInfo failed %d", rc);
+ delete hw;
+ return rc;
+ }
+
+ rc = hw->openCamera(&hw_dev[i]);
+ if (rc != NO_ERROR) {
+ delete hw;
+ return rc;
+ }
+ hw->getCameraSessionId(&m_pPhyCamera[phyId].camera_server_id);
+ m_pPhyCamera[phyId].dev = reinterpret_cast<camera_device_t*>(hw_dev[i]);
+ m_pPhyCamera[phyId].hwi = hw;
+ cam->sId[i] = m_pPhyCamera[phyId].camera_server_id;
+ LOGH("camera id %d server id : %d hw device %x, hw %x",
+ phyId, cam->sId[i], hw_dev[i], hw);
+ }
+ } else {
+ LOGE("Device version for camera id %d invalid %d",
+ camera_id, m_pLogicalCamera[camera_id].device_version);
+ return BAD_VALUE;
+ }
+
+ cam->dev.common.tag = HARDWARE_DEVICE_TAG;
+ cam->dev.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+ cam->dev.common.close = close_camera_device;
+ cam->dev.ops = &mCameraMuxerOps;
+ cam->dev.priv = (void*)cam;
+ *hw_device = &cam->dev.common;
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : getLogicalCamera
+ *
+ * DESCRIPTION: Get logical camera descriptor
+ *
+ * PARAMETERS :
+ * @device : camera hardware device info
+ *
+ * RETURN : logical camera descriptor or NULL
+ *==========================================================================*/
+qcamera_logical_descriptor_t* QCameraMuxer::getLogicalCamera(
+ struct camera_device * device)
+{
+ if(device && device->priv){
+ return (qcamera_logical_descriptor_t*)(device->priv);
+ }
+ return NULL;
+}
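+
+// getLogicalCamera() relies on the priv pointer set up in cameraDeviceOpen():
+// the device handed to the framework is the logical descriptor's embedded
+// camera_device_t, and dev.priv points back at the descriptor itself. A
+// minimal standalone sketch of that round trip, with illustrative stand-ins
+// (Dev, Desc) rather than the HAL types:
+//
+//     struct Dev  { void *priv; };
+//     struct Desc { Dev dev; int id; };
+//
+//     // open: hand out the embedded device, priv pointing back at the owner
+//     static Dev *openDesc(Desc &d) { d.dev.priv = &d; return &d.dev; }
+//
+//     // later calls: recover the owning descriptor from the device pointer
+//     static Desc *lookupDesc(Dev *dev) {
+//         return (dev && dev->priv) ? static_cast<Desc *>(dev->priv) : NULL;
+//     }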
+
+/*===========================================================================
+ * FUNCTION : getPhysicalCamera
+ *
+ * DESCRIPTION: Get physical camera descriptor
+ *
+ * PARAMETERS :
+ * @log_cam : Logical camera descriptor
+ * @index : physical camera index
+ *
+ * RETURN : physical camera descriptor or NULL
+ *==========================================================================*/
+qcamera_physical_descriptor_t* QCameraMuxer::getPhysicalCamera(
+ qcamera_logical_descriptor_t* log_cam, uint32_t index)
+{
+ if(!log_cam){
+ return NULL;
+ }
+ return &m_pPhyCamera[log_cam->pId[index]];
+}
+
+/*===========================================================================
+ * FUNCTION : getActiveNumOfPhyCam
+ *
+ * DESCRIPTION: Get the number of active physical cameras in the logical camera
+ *
+ * PARAMETERS :
+ * @log_cam : Logical camera descriptor
+ * @numOfAcitvePhyCam : number of active physical cameras in the logical camera
+ *
+ * RETURN :
+ * NO_ERROR : success
+ * ENODEV : Camera not found
+ * other: non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::getActiveNumOfPhyCam(
+ qcamera_logical_descriptor_t* log_cam, int& numOfAcitvePhyCam)
+{
+ CHECK_CAMERA_ERROR(log_cam);
+
+ numOfAcitvePhyCam = log_cam->numCameras;
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to HWI for error callbacks
+ *
+ * PARAMETERS :
+ * @msg_type: msg type to be sent
+ * @ext1 : optional extension1
+ * @ext2 : optional extension2
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::sendEvtNotify(int32_t msg_type, int32_t ext1,
+ int32_t ext2)
+{
+ LOGH("E");
+
+ CHECK_MUXER_ERROR();
+
+ qcamera_physical_descriptor_t *pCam = NULL;
+ pCam = (qcamera_physical_descriptor_t*)(gMuxer->getMainJpegCallbackCookie());
+
+ CHECK_CAMERA_ERROR(pCam);
+
+ QCamera2HardwareInterface *hwi = pCam->hwi;
+ CHECK_HWI_ERROR(hwi);
+
+ LOGH("X");
+ return pCam->hwi->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION : composeMpo
+ *
+ * DESCRIPTION: Compose the main and aux JPEGs into a single MPO
+ *
+ * PARAMETERS :
+ * @main_Jpeg: pointer to main JPEG info
+ * @aux_Jpeg : pointer to aux JPEG info
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMuxer::composeMpo(cam_compose_jpeg_info_t* main_Jpeg,
+ cam_compose_jpeg_info_t* aux_Jpeg)
+{
+ LOGH("E Main Jpeg %p Aux Jpeg %p", main_Jpeg, aux_Jpeg);
+
+ CHECK_MUXER();
+ if(main_Jpeg == NULL || aux_Jpeg == NULL) {
+ LOGE("input buffers invalid, ret = NO_MEMORY");
+ gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ return;
+ }
+
+ pthread_mutex_lock(&m_JpegLock);
+
+ m_pRelCamMpoJpeg = mGetMemoryCb(-1, main_Jpeg->buffer->size +
+ aux_Jpeg->buffer->size, 1, m_pMpoCallbackCookie);
+ if (NULL == m_pRelCamMpoJpeg) {
+ LOGE("getMemory for mpo, ret = NO_MEMORY");
+ gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ pthread_mutex_unlock(&m_JpegLock);
+ return;
+ }
+
+ // fill all structures to send for composition
+ mm_jpeg_mpo_info_t mpo_compose_info;
+ mpo_compose_info.num_of_images = 2;
+ mpo_compose_info.primary_image.buf_filled_len = main_Jpeg->buffer->size;
+ mpo_compose_info.primary_image.buf_vaddr =
+ (uint8_t*)(main_Jpeg->buffer->data);
+ mpo_compose_info.aux_images[0].buf_filled_len = aux_Jpeg->buffer->size;
+ mpo_compose_info.aux_images[0].buf_vaddr =
+ (uint8_t*)(aux_Jpeg->buffer->data);
+ mpo_compose_info.output_buff.buf_vaddr =
+ (uint8_t*)m_pRelCamMpoJpeg->data;
+ mpo_compose_info.output_buff.buf_filled_len = 0;
+ mpo_compose_info.output_buff_size = main_Jpeg->buffer->size +
+ aux_Jpeg->buffer->size;
+
+ LOGD("MPO buffer size %d\n"
+ "expected size %d, mpo_compose_info.output_buff_size %d",
+ m_pRelCamMpoJpeg->size,
+ main_Jpeg->buffer->size + aux_Jpeg->buffer->size,
+ mpo_compose_info.output_buff_size);
+
+ LOGD("MPO primary buffer filled lengths\n"
+ "mpo_compose_info.primary_image.buf_filled_len %d\n"
+ "mpo_compose_info.primary_image.buf_vaddr %p",
+ mpo_compose_info.primary_image.buf_filled_len,
+ mpo_compose_info.primary_image.buf_vaddr);
+
+ LOGD("MPO aux buffer filled lengths\n"
+ "mpo_compose_info.aux_images[0].buf_filled_len %d"
+ "mpo_compose_info.aux_images[0].buf_vaddr %p",
+ mpo_compose_info.aux_images[0].buf_filled_len,
+ mpo_compose_info.aux_images[0].buf_vaddr);
+
+ if(m_bDumpImages) {
+ LOGD("Dumping Main Image for MPO");
+ char buf_main[QCAMERA_MAX_FILEPATH_LENGTH];
+ memset(buf_main, 0, sizeof(buf_main));
+ snprintf(buf_main, sizeof(buf_main),
+ QCAMERA_DUMP_FRM_LOCATION "Main.jpg");
+
+ int file_fd_main = open(buf_main, O_RDWR | O_CREAT, 0777);
+ if (file_fd_main >= 0) {
+ ssize_t written_len = write(file_fd_main,
+ mpo_compose_info.primary_image.buf_vaddr,
+ mpo_compose_info.primary_image.buf_filled_len);
+ fchmod(file_fd_main, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+ LOGD("written number of bytes for main Image %zd\n",
+ written_len);
+ close(file_fd_main);
+ }
+
+ LOGD("Dumping Aux Image for MPO");
+ char buf_aux[QCAMERA_MAX_FILEPATH_LENGTH];
+ memset(buf_aux, 0, sizeof(buf_aux));
+ snprintf(buf_aux, sizeof(buf_aux),
+ QCAMERA_DUMP_FRM_LOCATION "Aux.jpg");
+
+ int file_fd_aux = open(buf_aux, O_RDWR | O_CREAT, 0777);
+ if (file_fd_aux >= 0) {
+ ssize_t written_len = write(file_fd_aux,
+ mpo_compose_info.aux_images[0].buf_vaddr,
+ mpo_compose_info.aux_images[0].buf_filled_len);
+ fchmod(file_fd_aux, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+ LOGD("written number of bytes for Aux Image %zd\n",
+ written_len);
+ close(file_fd_aux);
+ }
+ }
+
+ int32_t rc = mJpegMpoOps.compose_mpo(&mpo_compose_info);
+ LOGD("Compose mpo returned %d", rc);
+
+ if(rc != NO_ERROR) {
+ LOGE("ComposeMpo failed, ret = %d", rc);
+ gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ pthread_mutex_unlock(&m_JpegLock);
+ return;
+ }
+
+ if(m_bDumpImages) {
+ char buf_mpo[QCAMERA_MAX_FILEPATH_LENGTH];
+ memset(buf_mpo, 0, sizeof(buf_mpo));
+ snprintf(buf_mpo, sizeof(buf_mpo),
+ QCAMERA_DUMP_FRM_LOCATION "Composed.MPO");
+
+ int file_fd_mpo = open(buf_mpo, O_RDWR | O_CREAT, 0777);
+ if (file_fd_mpo >= 0) {
+ ssize_t written_len = write(file_fd_mpo,
+ m_pRelCamMpoJpeg->data,
+ m_pRelCamMpoJpeg->size);
+ fchmod(file_fd_mpo, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+ LOGD("written number of bytes for MPO Image %zd\n",
+ written_len);
+ close(file_fd_mpo);
+ }
+ }
+
+ mDataCb(main_Jpeg->msg_type,
+ m_pRelCamMpoJpeg,
+ main_Jpeg->index,
+ main_Jpeg->metadata,
+ m_pMpoCallbackCookie);
+
+ if (NULL != m_pRelCamMpoJpeg) {
+ m_pRelCamMpoJpeg->release(m_pRelCamMpoJpeg);
+ m_pRelCamMpoJpeg = NULL;
+ }
+
+ pthread_mutex_unlock(&m_JpegLock);
+ LOGH("X");
+ return;
+}
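+
+// Note on synchronization: composeMpo() serializes every composition on
+// m_JpegLock because m_pRelCamMpoJpeg is a single shared output buffer that
+// is allocated, handed to mDataCb and released within one pass; every failure
+// path taken after the lock is acquired drops the lock before returning.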
+
+/*===========================================================================
+ * FUNCTION : matchFrameId
+ *
+ * DESCRIPTION: function to match frame ids within queue nodes
+ *
+ * PARAMETERS :
+ * @data: pointer to queue node to be matched for condition
+ * @user_data: caller can add more info here
+ * @match_data : value to be matched against
+ *
+ * RETURN : true or false based on whether match was successful or not
+ *==========================================================================*/
+bool QCameraMuxer::matchFrameId(void *data, __unused void *user_data,
+ void *match_data)
+{
+ LOGH("E");
+
+ if (!data || !match_data) {
+ return false;
+ }
+
+ cam_compose_jpeg_info_t * node = (cam_compose_jpeg_info_t *) data;
+ uint32_t frame_idx = *((uint32_t *) match_data);
+ LOGH("X");
+ return node->frame_idx == frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION : findPreviousJpegs
+ *
+ * DESCRIPTION: Finds Jpegs in the queue with index less than delivered one
+ *
+ * PARAMETERS :
+ * @data: pointer to queue node to be matched for condition
+ * @user_data: caller can add more info here
+ * @match_data : value to be matched against
+ *
+ * RETURN : true or false based on whether match was successful or not
+ *==========================================================================*/
+bool QCameraMuxer::findPreviousJpegs(void *data, __unused void *user_data,
+ void *match_data)
+{
+ LOGH("E");
+
+ if (!data || !match_data) {
+ return false;
+ }
+ cam_compose_jpeg_info_t * node = (cam_compose_jpeg_info_t *) data;
+ uint32_t frame_idx = *((uint32_t *) match_data);
+ LOGH("X");
+ return node->frame_idx < frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseJpegInfo
+ *
+ * DESCRIPTION: callback function for the release of individual nodes
+ * in the JPEG queues.
+ *
+ * PARAMETERS :
+ * @data : ptr to the data to be released
+ * @user_data : caller can add more info here
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::releaseJpegInfo(void *data, __unused void *user_data)
+{
+ LOGH("E");
+
+ cam_compose_jpeg_info_t *jpegInfo = (cam_compose_jpeg_info_t *)data;
+ if(jpegInfo && jpegInfo->release_cb) {
+ if (jpegInfo->release_data != NULL) {
+ jpegInfo->release_cb(jpegInfo->release_data,
+ jpegInfo->release_cookie,
+ NO_ERROR);
+ }
+ }
+ LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION : composeMpoRoutine
+ *
+ * DESCRIPTION: specialized thread for MPO composition
+ *
+ * PARAMETERS :
+ * @data : pointer to the thread owner
+ *
+ * RETURN : void* to thread
+ *==========================================================================*/
+void* QCameraMuxer::composeMpoRoutine(__unused void *data)
+{
+ LOGH("E");
+ if (!gMuxer) {
+ LOGE("Error getting muxer ");
+ return NULL;
+ }
+
+ int running = 1;
+ int ret;
+ uint8_t is_active = FALSE;
+ QCameraCmdThread *cmdThread = &gMuxer->m_ComposeMpoTh;
+ cmdThread->setName("CAM_ComposeMpo");
+
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)", strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_START_DATA_PROC:
+ {
+ LOGH("start ComposeMpo processing");
+ is_active = TRUE;
+
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+ }
+ break;
+ case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+ {
+ LOGH("stop ComposeMpo processing");
+ is_active = FALSE;
+
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+ }
+ break;
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ if (is_active == TRUE) {
+ LOGH("Mpo Composition Requested");
+ cam_compose_jpeg_info_t *main_jpeg_node = NULL;
+ cam_compose_jpeg_info_t *aux_jpeg_node = NULL;
+ bool foundMatch = false;
+ while (!gMuxer->m_MainJpegQ.isEmpty() &&
+ !gMuxer->m_AuxJpegQ.isEmpty()) {
+ main_jpeg_node = (cam_compose_jpeg_info_t *)
+ gMuxer->m_MainJpegQ.dequeue();
+ if (main_jpeg_node != NULL) {
+ LOGD("main_jpeg_node found frame idx %d"
+ "ptr %p buffer_ptr %p buffer_size %d",
+ main_jpeg_node->frame_idx,
+ main_jpeg_node,
+ main_jpeg_node->buffer->data,
+ main_jpeg_node->buffer->size);
+ // find matching aux node in Aux Jpeg Queue
+ aux_jpeg_node =
+ (cam_compose_jpeg_info_t *) gMuxer->
+ m_AuxJpegQ.dequeue();
+ if (aux_jpeg_node != NULL) {
+ LOGD("aux_jpeg_node found frame idx %d"
+ "ptr %p buffer_ptr %p buffer_size %d",
+ aux_jpeg_node->frame_idx,
+ aux_jpeg_node,
+ aux_jpeg_node->buffer->data,
+ aux_jpeg_node->buffer->size);
+ foundMatch = true;
+ // start MPO composition
+ gMuxer->composeMpo(main_jpeg_node,
+ aux_jpeg_node);
+ }
+ }
+ if (main_jpeg_node != NULL) {
+ if ( main_jpeg_node->release_cb ) {
+ main_jpeg_node->release_cb(
+ main_jpeg_node->release_data,
+ main_jpeg_node->release_cookie,
+ NO_ERROR);
+ }
+ free(main_jpeg_node);
+ main_jpeg_node = NULL;
+ } else {
+ LOGH("Mpo Match not found");
+ }
+ if (aux_jpeg_node != NULL) {
+ if (aux_jpeg_node->release_cb) {
+ aux_jpeg_node->release_cb(
+ aux_jpeg_node->release_data,
+ aux_jpeg_node->release_cookie,
+ NO_ERROR);
+ }
+ free(aux_jpeg_node);
+ aux_jpeg_node = NULL;
+ } else {
+ LOGH("Mpo Match not found");
+ }
+ }
+ }
+ break;
+ }
+ case CAMERA_CMD_TYPE_EXIT:
+ LOGH("ComposeMpo thread exit");
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ } while (running);
+ LOGH("X");
+ return NULL;
+}
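+
+// The DO_NEXT_JOB handler above pairs JPEGs by draining the two queues in
+// lockstep: one main node and one aux node are dequeued per iteration and
+// handed to composeMpo(), after which both nodes are released. A simplified
+// standalone sketch of that pairing, using std::queue stand-ins (Jpeg,
+// pairAndCompose are illustrative names, not HAL symbols):
+//
+//     #include <queue>
+//     #include <functional>
+//
+//     struct Jpeg { unsigned frameIdx; };
+//
+//     static void pairAndCompose(std::queue<Jpeg> &mainQ,
+//             std::queue<Jpeg> &auxQ,
+//             const std::function<void(const Jpeg &, const Jpeg &)> &compose) {
+//         // consume one node from each queue while both have data
+//         while (!mainQ.empty() && !auxQ.empty()) {
+//             Jpeg mainJpeg = mainQ.front(); mainQ.pop();
+//             Jpeg auxJpeg  = auxQ.front();  auxQ.pop();
+//             compose(mainJpeg, auxJpeg);   // both nodes released after this
+//         }
+//     }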
+
+/*===========================================================================
+ * FUNCTION : jpeg_data_callback
+ *
+ * DESCRIPTION: JPEG data callback for snapshot
+ *
+ * PARAMETERS :
+ * @msg_type : callback msg type
+ * @data : data ptr of the buffer
+ * @index : index of the frame
+ * @metadata : metadata associated with the buffer
+ * @user : callback cookie returned back to the user
+ * @frame_idx : frame index for matching frames
+ * @release_cb : callback function for releasing the data memory
+ * @release_cookie : cookie for the release callback function
+ * @release_data :pointer indicating what needs to be released
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMuxer::jpeg_data_callback(int32_t msg_type,
+ const camera_memory_t *data, unsigned int index,
+ camera_frame_metadata_t *metadata, void *user,
+ uint32_t frame_idx, camera_release_callback release_cb,
+ void *release_cookie, void *release_data)
+{
+ LOGH("E");
+ CHECK_MUXER();
+
+ if(data != NULL) {
+ LOGH("jpeg received: data %p size %d data ptr %p frameIdx %d",
+ data, data->size, data->data, frame_idx);
+ int rc = gMuxer->storeJpeg(((qcamera_physical_descriptor_t*)(user))->type,
+ msg_type, data, index, metadata, user, frame_idx, release_cb,
+ release_cookie, release_data);
+ if(rc != NO_ERROR) {
+ gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ }
+ } else {
+ gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ }
+ LOGH("X");
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : storeJpeg
+ *
+ * DESCRIPTION: Stores JPEGs from related camera instances into the main/aux JPEG queues
+ *
+ * PARAMETERS :
+ * @cam_type : indicates whether main or aux camera sent the Jpeg callback
+ * @msg_type : callback msg type
+ * @data : data ptr of the buffer
+ * @index : index of the frame
+ * @metadata : metadata associated with the buffer
+ * @user : callback cookie returned back to the user
+ * @frame_idx : frame index for matching frames
+ * @release_cb : callback function for releasing the data memory
+ * @release_cookie : cookie for the release callback function
+ * @release_data :pointer indicating what needs to be released
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::storeJpeg(cam_sync_type_t cam_type,
+ int32_t msg_type, const camera_memory_t *data, unsigned int index,
+ camera_frame_metadata_t *metadata, void *user,uint32_t frame_idx,
+ camera_release_callback release_cb, void *release_cookie,
+ void *release_data)
+{
+ LOGH("E jpeg received: data %p size %d data ptr %p frameIdx %d",
+ data, data->size, data->data, frame_idx);
+
+ CHECK_MUXER_ERROR();
+
+ if (!m_bMpoEnabled) {
+ if (cam_type == CAM_TYPE_MAIN) {
+ // send the data callback only for the main camera;
+ // the aux image is ignored and released back
+ mDataCb(msg_type,
+ data,
+ index,
+ metadata,
+ m_pMpoCallbackCookie);
+ }
+ if (release_cb) {
+ release_cb(release_data, release_cookie, NO_ERROR);
+ }
+ LOGH("X");
+ return NO_ERROR;
+ }
+
+ cam_compose_jpeg_info_t* pJpegFrame =
+ (cam_compose_jpeg_info_t*)malloc(sizeof(cam_compose_jpeg_info_t));
+ if (!pJpegFrame) {
+ LOGE("Allocation failed for MPO nodes");
+ return NO_MEMORY;
+ }
+ memset(pJpegFrame, 0, sizeof(*pJpegFrame));
+
+ pJpegFrame->msg_type = msg_type;
+ pJpegFrame->buffer = const_cast<camera_memory_t*>(data);
+ pJpegFrame->index = index;
+ pJpegFrame->metadata = metadata;
+ pJpegFrame->user = user;
+ pJpegFrame->valid = true;
+ pJpegFrame->frame_idx = frame_idx;
+ pJpegFrame->release_cb = release_cb;
+ pJpegFrame->release_cookie = release_cookie;
+ pJpegFrame->release_data = release_data;
+ if(cam_type == CAM_TYPE_MAIN) {
+ if (m_MainJpegQ.enqueue((void *)pJpegFrame)) {
+ LOGD("Main FrameIdx %d", pJpegFrame->frame_idx);
+ if (m_MainJpegQ.getCurrentSize() > 0) {
+ LOGD("Trigger Compose");
+ m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ }
+ } else {
+ LOGE("Enqueue Failed for Main Jpeg Q");
+ if ( pJpegFrame->release_cb ) {
+ // release the buffer back to its owner
+ pJpegFrame->release_cb(
+ pJpegFrame->release_data,
+ pJpegFrame->release_cookie,
+ NO_ERROR);
+ }
+ free(pJpegFrame);
+ pJpegFrame = NULL;
+ return NO_MEMORY;
+ }
+
+ } else {
+ if (m_AuxJpegQ.enqueue((void *)pJpegFrame)) {
+ LOGD("Aux FrameIdx %d", pJpegFrame->frame_idx);
+ if (m_AuxJpegQ.getCurrentSize() > 0) {
+ LOGD("Trigger Compose");
+ m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ }
+ } else {
+ LOGE("Enqueue Failed for Aux Jpeg Q");
+ if ( pJpegFrame->release_cb ) {
+ // release the buffer back to its owner
+ pJpegFrame->release_cb(
+ pJpegFrame->release_data,
+ pJpegFrame->release_cookie,
+ NO_ERROR);
+ }
+ free(pJpegFrame);
+ pJpegFrame = NULL;
+ return NO_MEMORY;
+ }
+ }
+ LOGH("X");
+
+ return NO_ERROR;
+}
+
+
+// Muxer Ops
+camera_device_ops_t QCameraMuxer::mCameraMuxerOps = {
+ .set_preview_window = QCameraMuxer::set_preview_window,
+ .set_callbacks = QCameraMuxer::set_callBacks,
+ .enable_msg_type = QCameraMuxer::enable_msg_type,
+ .disable_msg_type = QCameraMuxer::disable_msg_type,
+ .msg_type_enabled = QCameraMuxer::msg_type_enabled,
+
+ .start_preview = QCameraMuxer::start_preview,
+ .stop_preview = QCameraMuxer::stop_preview,
+ .preview_enabled = QCameraMuxer::preview_enabled,
+ .store_meta_data_in_buffers= QCameraMuxer::store_meta_data_in_buffers,
+
+ .start_recording = QCameraMuxer::start_recording,
+ .stop_recording = QCameraMuxer::stop_recording,
+ .recording_enabled = QCameraMuxer::recording_enabled,
+ .release_recording_frame = QCameraMuxer::release_recording_frame,
+
+ .auto_focus = QCameraMuxer::auto_focus,
+ .cancel_auto_focus = QCameraMuxer::cancel_auto_focus,
+
+ .take_picture = QCameraMuxer::take_picture,
+ .cancel_picture = QCameraMuxer::cancel_picture,
+
+ .set_parameters = QCameraMuxer::set_parameters,
+ .get_parameters = QCameraMuxer::get_parameters,
+ .put_parameters = QCameraMuxer::put_parameters,
+ .send_command = QCameraMuxer::send_command,
+
+ .release = QCameraMuxer::release,
+ .dump = QCameraMuxer::dump,
+};
+
+
+}; // namespace android
diff --git a/camera/QCamera2/HAL/QCameraMuxer.h b/camera/QCamera2/HAL/QCameraMuxer.h
new file mode 100644
index 0000000..a85612e
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMuxer.h
@@ -0,0 +1,284 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __QCAMERAMUXER_H__
+#define __QCAMERAMUXER_H__
+
+#include "camera.h"
+#include "QCamera2HWI.h"
+#include "QCamera3HWI.h"
+
+namespace qcamera {
+
+/* Struct@ qcamera_physical_descriptor_t
+ *
+ * Description@ This structure specifies various attributes of the
+ * physical cameras enumerated on the device
+ */
+typedef struct {
+ // Userspace Physical Camera ID
+ uint32_t id;
+ // Server Camera ID
+ uint32_t camera_server_id;
+ // Device version
+ uint32_t device_version;
+ // Specifies type of camera
+ cam_sync_type_t type;
+ // Specifies mode of Camera
+ cam_sync_mode_t mode;
+ // Camera Info
+ camera_info cam_info;
+ // Reference to HWI
+ QCamera2HardwareInterface *hwi;
+ // Reference to camera device structure
+ camera_device_t* dev;
+} qcamera_physical_descriptor_t;
+
+/* Struct@ qcamera_logical_descriptor_t
+ *
+ * Description@ This structure stores information about logical cameras
+ * and the corresponding physical cameras that are part of
+ * each logical camera
+ */
+typedef struct {
+ // Camera Device to be shared to Frameworks
+ camera_device_t dev;
+ // Device version
+ uint32_t device_version;
+ // Logical Camera ID
+ uint32_t id;
+ // Logical Camera Facing
+ int32_t facing;
+ // Number of Physical camera present in this logical camera
+ uint32_t numCameras;
+ // Signifies whether a LINK has been established between the physical cameras
+ bool bSyncOn;
+ // index of the primary physical camera session in the bundle
+ uint8_t nPrimaryPhyCamIndex;
+ // Signifies Physical Camera ID of each camera
+ uint32_t pId[MAX_NUM_CAMERA_PER_BUNDLE];
+ // Signifies server camera ID of each camera
+ uint32_t sId[MAX_NUM_CAMERA_PER_BUNDLE];
+ // Signifies type of each camera
+ cam_sync_type_t type[MAX_NUM_CAMERA_PER_BUNDLE];
+ // Signifies mode of each camera
+ cam_sync_mode_t mode[MAX_NUM_CAMERA_PER_BUNDLE];
+} qcamera_logical_descriptor_t;
+
+/* Struct@ cam_compose_jpeg_info_t
+ *
+ * Description@ This structure stores information about individual Jpeg images
+ * received from multiple related physical camera instances. These images would then be
+ * composed together into a single MPO image later.
+ */
+typedef struct {
+ // msg_type is same as data callback msg_type
+ int32_t msg_type;
+ // ptr to actual data buffer
+ camera_memory_t *buffer;
+ // index of the buffer same as received in data callback
+ unsigned int index;
+ // metadata associated with the buffer
+ camera_frame_metadata_t *metadata;
+ // user contains the caller's identity
+ // this contains a reference to the physical cam structure
+ // of the HWI instance which had requested for this data buffer
+ void *user;
+ // this indicates validity of the buffer
+ // this flag is used by multiple threads to check validity of
+ // Jpegs received by other threads
+ bool valid;
+ // frame id of the Jpeg. this is needed for frame sync between aux
+ // and main camera sessions
+ uint32_t frame_idx;
+ // release callback function to release this Jpeg memory later after
+ // composition is completed
+ camera_release_callback release_cb;
+ // cookie for the release callback function
+ void *release_cookie;
+ // release data info for what needs to be released
+ void *release_data;
+}cam_compose_jpeg_info_t;
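+
+// Typical lifetime of a cam_compose_jpeg_info_t node, for illustration:
+//   1. jpeg_data_callback() receives an encoded buffer and storeJpeg() wraps
+//      it into a node on the main or aux JPEG queue, keyed by camera type.
+//   2. composeMpoRoutine() dequeues one node from each queue and passes the
+//      pair to composeMpo().
+//   3. After composition, or when the queues are flushed (see
+//      releaseJpegInfo()), release_cb(release_data, release_cookie, NO_ERROR)
+//      returns the buffer to its owner.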
+
+/* Class@ QCameraMuxer
+ *
+ * Description@ Muxer interface
+ * a) Manages the grouping of the physical cameras into a logical camera
+ * b) Muxes the operational calls from Frameworks to HWI
+ * c) Composes MPO from JPEG
+ */
+class QCameraMuxer {
+
+public:
+ /* Public Methods */
+ QCameraMuxer(uint32_t num_of_cameras);
+ virtual ~QCameraMuxer();
+ static void getCameraMuxer(QCameraMuxer** pCamMuxer,
+ uint32_t num_of_cameras);
+ static int get_number_of_cameras();
+ static int get_camera_info(int camera_id, struct camera_info *info);
+ static int set_callbacks(const camera_module_callbacks_t *callbacks);
+ static int open_legacy(const struct hw_module_t* module,
+ const char* id, uint32_t halVersion, struct hw_device_t** device);
+
+ static int camera_device_open(const struct hw_module_t* module,
+ const char* id,
+ struct hw_device_t** device);
+ static int close_camera_device( hw_device_t *);
+
+ /* Operation methods directly accessed by Camera Service */
+ static camera_device_ops_t mCameraMuxerOps;
+
+ /* Start of operational methods */
+ static int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ static void set_callBacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+ static void enable_msg_type(struct camera_device *, int32_t msg_type);
+ static void disable_msg_type(struct camera_device *, int32_t msg_type);
+ static int msg_type_enabled(struct camera_device *, int32_t msg_type);
+ static int start_preview(struct camera_device *);
+ static void stop_preview(struct camera_device *);
+ static int preview_enabled(struct camera_device *);
+ static int store_meta_data_in_buffers(struct camera_device *,
+ int enable);
+ static int start_recording(struct camera_device *);
+ static void stop_recording(struct camera_device *);
+ static int recording_enabled(struct camera_device *);
+ static void release_recording_frame(struct camera_device *,
+ const void *opaque);
+ static int auto_focus(struct camera_device *);
+ static int cancel_auto_focus(struct camera_device *);
+ static int take_picture(struct camera_device *);
+ static int cancel_picture(struct camera_device *);
+ static int set_parameters(struct camera_device *, const char *parms);
+ static char* get_parameters(struct camera_device *);
+ static void put_parameters(struct camera_device *, char *);
+ static int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+ static void release(struct camera_device *);
+ static int dump(struct camera_device *, int fd);
+ /* End of operational methods */
+
+ static void jpeg_data_callback(int32_t msg_type,
+ const camera_memory_t *data, unsigned int index,
+ camera_frame_metadata_t *metadata, void *user,
+ uint32_t frame_idx, camera_release_callback release_cb,
+ void *release_cookie, void *release_data);
+ // add notify error msgs to the notifier queue of the primary related cam instance
+ static int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+ // function to compose all JPEG images from all physical related camera instances
+ void composeMpo(cam_compose_jpeg_info_t* main_Jpeg,
+ cam_compose_jpeg_info_t* aux_Jpeg);
+ static void* composeMpoRoutine(void* data);
+ static bool matchFrameId(void *data, void *user_data, void *match_data);
+ static bool findPreviousJpegs(void *data, void *user_data, void *match_data);
+ static void releaseJpegInfo(void *data, void *user_data);
+
+public:
+ /* Public Members Variables */
+ // Jpeg and Mpo ops need to be shared between 2 HWI instances
+ // hence these are cached in the muxer along with the Jpeg handle
+ mm_jpeg_ops_t mJpegOps;
+ mm_jpeg_mpo_ops_t mJpegMpoOps;
+ uint32_t mJpegClientHandle;
+ // Stores Camera Data Callback function
+ camera_data_callback mDataCb;
+ // Stores Camera GetMemory Callback function
+ camera_request_memory mGetMemoryCb;
+
+private:
+ /* Private Member Variables */
+ qcamera_physical_descriptor_t *m_pPhyCamera;
+ qcamera_logical_descriptor_t *m_pLogicalCamera;
+ const camera_module_callbacks_t *m_pCallbacks;
+ bool m_bAuxCameraExposed;
+ uint8_t m_nPhyCameras;
+ uint8_t m_nLogicalCameras;
+
+ // Main Camera session Jpeg Queue
+ QCameraQueue m_MainJpegQ;
+ // Aux Camera session Jpeg Queue
+ QCameraQueue m_AuxJpegQ;
+ // thread for mpo composition
+ QCameraCmdThread m_ComposeMpoTh;
+ // Final Mpo Jpeg Buffer
+ camera_memory_t *m_pRelCamMpoJpeg;
+ // Lock needed to synchronize between multiple composition requests
+ pthread_mutex_t m_JpegLock;
+ // this callback cookie would be used for sending Final mpo Jpeg to the framework
+ void *m_pMpoCallbackCookie;
+ // this callback cookie would be used for caching main related cam phy instance
+ // this is needed for error scenarios
+ // in case of error, we use this cookie to get the HWI instance and send errors in the notify cb
+ void *m_pJpegCallbackCookie;
+ // flag to indicate whether we need to dump dual camera snapshots
+ bool m_bDumpImages;
+ // flag to indicate whether MPO is enabled or not
+ bool m_bMpoEnabled;
+ // Signifies if frame sync is enabled
+ bool m_bFrameSyncEnabled;
+ // flag to indicate whether recording hint is internally set.
+ bool m_bRecordingHintInternallySet;
+
+ /* Private Member Methods */
+ int setupLogicalCameras();
+ int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
+ int getNumberOfCameras();
+ int getCameraInfo(int camera_id, struct camera_info *info,
+ cam_sync_type_t *p_cam_type);
+ int32_t setCallbacks(const camera_module_callbacks_t *callbacks);
+ int32_t setDataCallback(camera_data_callback data_cb);
+ int32_t setMemoryCallback(camera_request_memory get_memory);
+ qcamera_logical_descriptor_t* getLogicalCamera(
+ struct camera_device * device);
+ qcamera_physical_descriptor_t* getPhysicalCamera(
+ qcamera_logical_descriptor_t* log_cam, uint32_t index);
+ int32_t getActiveNumOfPhyCam(
+ qcamera_logical_descriptor_t* log_cam, int& numOfAcitvePhyCam);
+ int32_t setMpoCallbackCookie(void* mpoCbCookie);
+ void* getMpoCallbackCookie();
+ int32_t setMainJpegCallbackCookie(void* jpegCbCookie);
+ void* getMainJpegCallbackCookie();
+ void setJpegHandle(uint32_t handle) { mJpegClientHandle = handle; }
+ // function to store single JPEG from 1 related physical camera instance
+ int32_t storeJpeg(cam_sync_type_t cam_type, int32_t msg_type,
+ const camera_memory_t *data, unsigned int index,
+ camera_frame_metadata_t *metadata, void *user,
+ uint32_t frame_idx, camera_release_callback release_cb,
+ void *release_cookie, void *release_data);
+
+}; // End of class QCameraMuxer
+
+} // namespace qcamera
+#endif /* __QCAMERAMUXER_H__ */
+
diff --git a/camera/QCamera2/HAL/QCameraParameters.cpp b/camera/QCamera2/HAL/QCameraParameters.cpp
new file mode 100644
index 0000000..d5eb844
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParameters.cpp
@@ -0,0 +1,14523 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParameters"
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <math.h>
+#include <string.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#define SYSINFO_H <SYSTEM_HEADER_PREFIX/sysinfo.h>
+#include SYSINFO_H
+#include "gralloc_priv.h"
+#include "graphics.h"
+
+// Camera dependencies
+#include "QCameraBufferMaps.h"
+#include "QCamera2HWI.h"
+#include "QCameraParameters.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define PI 3.14159265
+#define ASPECT_TOLERANCE 0.001
+#define CAMERA_DEFAULT_LONGSHOT_STAGES 4
+#define CAMERA_MIN_LONGSHOT_STAGES 2
+#define FOCUS_PERCISION 0.0000001
+
+
+namespace qcamera {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_QC_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_QC_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_QC_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_QC_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_QC_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_QC_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_QC_CONTINUOUS_ISO[] = "continuous-iso";
+const char QCameraParameters::KEY_QC_MIN_ISO[] = "min-iso";
+const char QCameraParameters::KEY_QC_MAX_ISO[] = "max-iso";
+const char QCameraParameters::KEY_QC_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_QC_EXPOSURE_TIME[] = "exposure-time";
+const char QCameraParameters::KEY_QC_MIN_EXPOSURE_TIME[] = "min-exposure-time";
+const char QCameraParameters::KEY_QC_MAX_EXPOSURE_TIME[] = "max-exposure-time";
+const char QCameraParameters::KEY_QC_CURRENT_EXPOSURE_TIME[] = "cur-exposure-time";
+const char QCameraParameters::KEY_QC_CURRENT_ISO[] = "cur-iso";
+const char QCameraParameters::KEY_QC_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_QC_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_QC_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_QC_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_QC_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_QC_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_QC_FOCUS_ALGO[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_SUPPORTED_FOCUS_ALGOS[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POSITION[] = "manual-focus-position";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POS_TYPE[] = "manual-focus-pos-type";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_INDEX[] = "min-focus-pos-index";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_INDEX[] = "max-focus-pos-index";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_DAC[] = "min-focus-pos-dac";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_DAC[] = "max-focus-pos-dac";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_RATIO[] = "min-focus-pos-ratio";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_RATIO[] = "max-focus-pos-ratio";
+const char QCameraParameters::KEY_QC_FOCUS_POSITION_SCALE[] = "cur-focus-scale";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_DIOPTER[] = "min-focus-pos-diopter";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_DIOPTER[] = "max-focus-pos-diopter";
+const char QCameraParameters::KEY_QC_FOCUS_POSITION_DIOPTER[] = "cur-focus-diopter";
+const char QCameraParameters::KEY_QC_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_QC_FACE_RECOGNITION[] = "face-recognition";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_RECOGNITION[] = "face-recognition-values";
+const char QCameraParameters::KEY_QC_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_QC_DIS[] = "dis";
+const char QCameraParameters::KEY_QC_OIS[] = "ois";
+const char QCameraParameters::KEY_QC_SUPPORTED_DIS_MODES[] = "dis-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_OIS_MODES[] = "ois-values";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_SPEED_RECORDING[] = "video-hsr";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_QC_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_QC_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_QC_ZSL[] = "zsl";
+const char QCameraParameters::KEY_QC_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_QC_ZSL_BURST_INTERVAL[] = "capture-burst-interval";
+const char QCameraParameters::KEY_QC_ZSL_BURST_LOOKBACK[] = "capture-burst-retroactive";
+const char QCameraParameters::KEY_QC_ZSL_QUEUE_DEPTH[] = "capture-burst-queue-depth";
+const char QCameraParameters::KEY_QC_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_QC_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_AE_BRACKET_MODES[] = "ae-bracket-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_RAW_FORMATS[] = "raw-format-values";
+const char QCameraParameters::KEY_QC_RAW_FORMAT[] = "raw-format";
+const char QCameraParameters::KEY_QC_ORIENTATION[] = "orientation";
+const char QCameraParameters::KEY_QC_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_CAPTURE_BURST_EXPOSURE[] = "capture-burst-exposures";
+const char QCameraParameters::KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[] = "num-snaps-per-shutter";
+const char QCameraParameters::KEY_QC_NUM_RETRO_BURST_PER_SHUTTER[] = "num-retro-burst-per-shutter";
+const char QCameraParameters::KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD[] = "zsl-burst-led-on-period";
+const char QCameraParameters::KEY_QC_NO_DISPLAY_MODE[] = "no-display-mode";
+const char QCameraParameters::KEY_QC_RAW_PICUTRE_SIZE[] = "raw-size";
+const char QCameraParameters::KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[] = "supported-live-snapshot-sizes";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_NEED_1X[] = "hdr-need-1x-values";
+const char QCameraParameters::KEY_QC_HDR_NEED_1X[] = "hdr-need-1x";
+const char QCameraParameters::KEY_QC_PREVIEW_FLIP[] = "preview-flip";
+const char QCameraParameters::KEY_QC_VIDEO_FLIP[] = "video-flip";
+const char QCameraParameters::KEY_QC_SNAPSHOT_PICTURE_FLIP[] = "snapshot-picture-flip";
+const char QCameraParameters::KEY_QC_SUPPORTED_FLIP_MODES[] = "flip-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_HDR[] = "video-hdr";
+const char QCameraParameters::KEY_QC_SENSOR_HDR[] = "sensor-hdr";
+const char QCameraParameters::KEY_QC_VT_ENABLE[] = "avtimer";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HDR_MODES[] = "video-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SENSOR_HDR_MODES[] = "sensor-hdr-values";
+const char QCameraParameters::KEY_QC_AUTO_HDR_ENABLE [] = "auto-hdr-enable";
+const char QCameraParameters::KEY_QC_SNAPSHOT_BURST_NUM[] = "snapshot-burst-num";
+const char QCameraParameters::KEY_QC_SNAPSHOT_FD_DATA[] = "snapshot-fd-data-enable";
+const char QCameraParameters::KEY_QC_TINTLESS_ENABLE[] = "tintless";
+const char QCameraParameters::KEY_QC_SCENE_SELECTION[] = "scene-selection";
+const char QCameraParameters::KEY_QC_CDS_MODE[] = "cds-mode";
+const char QCameraParameters::KEY_QC_VIDEO_CDS_MODE[] = "video-cds-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_CDS_MODES[] = "cds-mode-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_CDS_MODES[] = "video-cds-mode-values";
+const char QCameraParameters::KEY_QC_TNR_MODE[] = "tnr-mode";
+const char QCameraParameters::KEY_QC_VIDEO_TNR_MODE[] = "video-tnr-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_TNR_MODES[] = "tnr-mode-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_TNR_MODES[] = "video-tnr-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_ROTATION[] = "video-rotation";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES[] = "video-rotation-values";
+const char QCameraParameters::KEY_QC_AF_BRACKET[] = "af-bracket";
+const char QCameraParameters::KEY_QC_SUPPORTED_AF_BRACKET_MODES[] = "af-bracket-values";
+const char QCameraParameters::KEY_QC_RE_FOCUS[] = "re-focus";
+const char QCameraParameters::KEY_QC_SUPPORTED_RE_FOCUS_MODES[] = "re-focus-values";
+const char QCameraParameters::KEY_QC_CHROMA_FLASH[] = "chroma-flash";
+const char QCameraParameters::KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[] = "chroma-flash-values";
+const char QCameraParameters::KEY_QC_OPTI_ZOOM[] = "opti-zoom";
+const char QCameraParameters::KEY_QC_SEE_MORE[] = "see-more";
+const char QCameraParameters::KEY_QC_STILL_MORE[] = "still-more";
+const char QCameraParameters::KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[] = "opti-zoom-values";
+const char QCameraParameters::KEY_QC_HDR_MODE[] = "hdr-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_KEY_QC_HDR_MODES[] = "hdr-mode-values";
+const char QCameraParameters::KEY_QC_TRUE_PORTRAIT[] = "true-portrait";
+const char QCameraParameters::KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[] = "true-portrait-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SEE_MORE_MODES[] = "see-more-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_STILL_MORE_MODES[] = "still-more-values";
+const char QCameraParameters::KEY_INTERNAL_PERVIEW_RESTART[] = "internal-restart";
+const char QCameraParameters::KEY_QC_RDI_MODE[] = "rdi-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_RDI_MODES[] = "rdi-mode-values";
+const char QCameraParameters::KEY_QC_SECURE_MODE[] = "secure-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_SECURE_MODES[] = "secure-mode-values";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::KEY_QC_AUTO_HDR_SUPPORTED[] = "auto-hdr-supported";
+const char QCameraParameters::KEY_QC_LONGSHOT_SUPPORTED[] = "longshot-supported";
+const char QCameraParameters::KEY_QC_ZSL_HDR_SUPPORTED[] = "zsl-hdr-supported";
+const char QCameraParameters::KEY_QC_WB_MANUAL_CCT[] = "wb-manual-cct";
+const char QCameraParameters::KEY_QC_MIN_WB_CCT[] = "min-wb-cct";
+const char QCameraParameters::KEY_QC_MAX_WB_CCT[] = "max-wb-cct";
+
+const char QCameraParameters::KEY_QC_MANUAL_WB_GAINS[] = "manual-wb-gains";
+const char QCameraParameters::KEY_QC_MIN_WB_GAIN[] = "min-wb-gain";
+const char QCameraParameters::KEY_QC_MAX_WB_GAIN[] = "max-wb-gain";
+
+const char QCameraParameters::KEY_QC_MANUAL_WB_TYPE[] = "manual-wb-type";
+const char QCameraParameters::KEY_QC_MANUAL_WB_VALUE[] = "manual-wb-value";
+
+const char QCameraParameters::WHITE_BALANCE_MANUAL[] = "manual";
+const char QCameraParameters::FOCUS_MODE_MANUAL_POSITION[] = "manual";
+const char QCameraParameters::KEY_QC_CACHE_VIDEO_BUFFERS[] = "cache-video-buffers";
+
+const char QCameraParameters::KEY_QC_LONG_SHOT[] = "long-shot";
+const char QCameraParameters::KEY_QC_INITIAL_EXPOSURE_INDEX[] = "initial-exp-index";
+const char QCameraParameters::KEY_QC_INSTANT_AEC[] = "instant-aec";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE[] = "instant-capture";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_SUPPORTED_MODES[] = "instant-aec-values";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[] = "instant-capture-values";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+const char QCameraParameters::EFFECT_BEAUTY[] = "beauty";
+
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd"; // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+const char QCameraParameters::SCENE_MODE_HDR[] = "hdr";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+const char QCameraParameters::QC_PIXEL_FORMAT_NV12_VENUS[] = "nv12-venus";
+
+// Values for raw image formats
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[] = "yuv-raw8-yuyv";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[] = "yuv-raw8-yvyu";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[] = "yuv-raw8-uyvy";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[] = "yuv-raw8-vyuy";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[] = "bayer-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[] = "bayer-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[] = "bayer-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[] = "bayer-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[] = "bayer-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[] = "bayer-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[] = "bayer-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[] = "bayer-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[] = "bayer-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[] = "bayer-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[] = "bayer-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[] = "bayer-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG[] = "bayer-qcom-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG[] = "bayer-qcom-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB[] = "bayer-qcom-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR[] = "bayer-qcom-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[] = "bayer-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[] = "bayer-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[] = "bayer-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[] = "bayer-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[] = "bayer-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[] = "bayer-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[] = "bayer-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[] = "bayer-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[] = "bayer-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[] = "bayer-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[] = "bayer-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[] = "bayer-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG[] = "bayer-mipi-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG[] = "bayer-mipi-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB[] = "bayer-mipi-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR[] = "bayer-mipi-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[] = "bayer-ideal-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[] = "bayer-ideal-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[] = "bayer-ideal-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[] = "bayer-ideal-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[] = "bayer-ideal-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[] = "bayer-ideal-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[] = "bayer-ideal-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[] = "bayer-ideal-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[] = "bayer-ideal-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[] = "bayer-ideal-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[] = "bayer-ideal-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[] = "bayer-ideal-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG[] = "bayer-ideal-qcom-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG[] = "bayer-ideal-qcom-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB[] = "bayer-ideal-qcom-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR[] = "bayer-ideal-qcom-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[] = "bayer-ideal-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[] = "bayer-ideal-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[] = "bayer-ideal-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[] = "bayer-ideal-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[] = "bayer-ideal-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[] = "bayer-ideal-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[] = "bayer-ideal-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[] = "bayer-ideal-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[] = "bayer-ideal-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[] = "bayer-ideal-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[] = "bayer-ideal-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[] = "bayer-ideal-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG[] = "bayer-ideal-mipi-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG[] = "bayer-ideal-mipi-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB[] = "bayer-ideal-mipi-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR[] = "bayer-ideal-mipi-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[] = "bayer-ideal-plain8-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[] = "bayer-ideal-plain8-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[] = "bayer-ideal-plain8-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[] = "bayer-ideal-plain8-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[] = "bayer-ideal-plain16-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[] = "bayer-ideal-plain16-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[] = "bayer-ideal-plain16-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[] = "bayer-ideal-plain16-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[] = "bayer-ideal-plain16-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[] = "bayer-ideal-plain16-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[] = "bayer-ideal-plain16-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[] = "bayer-ideal-plain16-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[] = "bayer-ideal-plain16-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[] = "bayer-ideal-plain16-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[] = "bayer-ideal-plain16-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[] = "bayer-ideal-plain16-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG[] = "bayer-ideal-plain16-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG[] = "bayer-ideal-plain16-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB[] = "bayer-ideal-plain16-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR[] = "bayer-ideal-plain16-14bggr";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+const char QCameraParameters::ISO_3200[] = "ISO3200";
+const char QCameraParameters::ISO_MANUAL[] = "manual";
+
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SMART_METERING[] = "smart-metering";
+const char QCameraParameters::AUTO_EXPOSURE_USER_METERING[] = "user-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING_ADV[] = "spot-metering-adv";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[] = "center-weighted-adv";
+
+// Values for instant AEC modes
+const char QCameraParameters::KEY_QC_INSTANT_AEC_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_FAST_AEC[] = "2";
+
+// Values for instant capture modes
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_FAST_AEC[] = "2";
+
+const char QCameraParameters::KEY_QC_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_QC_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_QC_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_QC_GPS_STATUS[] = "gps-status";
+
+const char QCameraParameters::KEY_QC_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_QC_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+const char QCameraParameters::VALUE_ENABLE[] = "enable";
+const char QCameraParameters::VALUE_DISABLE[] = "disable";
+const char QCameraParameters::VALUE_OFF[] = "off";
+const char QCameraParameters::VALUE_ON[] = "on";
+const char QCameraParameters::VALUE_TRUE[] = "true";
+const char QCameraParameters::VALUE_FALSE[] = "false";
+
+const char QCameraParameters::VALUE_FAST[] = "fast";
+const char QCameraParameters::VALUE_HIGH_QUALITY[] = "high-quality";
+
+const char QCameraParameters::KEY_QC_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_QC_MIN_SHARPNESS[] = "min-sharpness";
+const char QCameraParameters::KEY_QC_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_QC_SHARPNESS_STEP[] = "sharpness-step";
+const char QCameraParameters::KEY_QC_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_QC_MIN_CONTRAST[] = "min-contrast";
+const char QCameraParameters::KEY_QC_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_QC_CONTRAST_STEP[] = "contrast-step";
+const char QCameraParameters::KEY_QC_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_QC_MIN_SATURATION[] = "min-saturation";
+const char QCameraParameters::KEY_QC_MAX_SATURATION[] = "max-saturation";
+const char QCameraParameters::KEY_QC_SATURATION_STEP[] = "saturation-step";
+const char QCameraParameters::KEY_QC_BRIGHTNESS[] = "luma-adaptation";
+const char QCameraParameters::KEY_QC_MIN_BRIGHTNESS[] = "min-brightness";
+const char QCameraParameters::KEY_QC_MAX_BRIGHTNESS[] = "max-brightness";
+const char QCameraParameters::KEY_QC_BRIGHTNESS_STEP[] = "brightness-step";
+const char QCameraParameters::KEY_QC_SCE_FACTOR[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_QC_MIN_SCE_FACTOR[] = "min-sce-factor";
+const char QCameraParameters::KEY_QC_MAX_SCE_FACTOR[] = "max-sce-factor";
+const char QCameraParameters::KEY_QC_SCE_FACTOR_STEP[] = "sce-factor-step";
+
+const char QCameraParameters::KEY_QC_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
+
+// Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::FOCUS_ALGO_AUTO[] = "auto";
+const char QCameraParameters::FOCUS_ALGO_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::FOCUS_ALGO_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::FOCUS_ALGO_FRAME_AVERAGE[] = "frame-average";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+const char QCameraParameters::VIDEO_HFR_5X[] = "150";
+const char QCameraParameters::VIDEO_HFR_6X[] = "180";
+const char QCameraParameters::VIDEO_HFR_7X[] = "210";
+const char QCameraParameters::VIDEO_HFR_8X[] = "240";
+const char QCameraParameters::VIDEO_HFR_9X[] = "480";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+// Values for AF Bracketing setting.
+const char QCameraParameters::AF_BRACKET_OFF[] = "af-bracket-off";
+const char QCameraParameters::AF_BRACKET_ON[] = "af-bracket-on";
+
+// Values for Refocus setting.
+const char QCameraParameters::RE_FOCUS_OFF[] = "re-focus-off";
+const char QCameraParameters::RE_FOCUS_ON[] = "re-focus-on";
+
+// Values for Chroma Flash setting.
+const char QCameraParameters::CHROMA_FLASH_OFF[] = "chroma-flash-off";
+const char QCameraParameters::CHROMA_FLASH_ON[] = "chroma-flash-on";
+
+// Values for Opti Zoom setting.
+const char QCameraParameters::OPTI_ZOOM_OFF[] = "opti-zoom-off";
+const char QCameraParameters::OPTI_ZOOM_ON[] = "opti-zoom-on";
+
+// Values for Still More setting.
+const char QCameraParameters::STILL_MORE_OFF[] = "still-more-off";
+const char QCameraParameters::STILL_MORE_ON[] = "still-more-on";
+
+// Values for HDR mode setting.
+const char QCameraParameters::HDR_MODE_SENSOR[] = "hdr-mode-sensor";
+const char QCameraParameters::HDR_MODE_MULTI_FRAME[] = "hdr-mode-multiframe";
+
+// Values for True Portrait setting.
+const char QCameraParameters::TRUE_PORTRAIT_OFF[] = "true-portrait-off";
+const char QCameraParameters::TRUE_PORTRAIT_ON[] = "true-portrait-on";
+
+// Values for FLIP settings.
+const char QCameraParameters::FLIP_MODE_OFF[] = "off";
+const char QCameraParameters::FLIP_MODE_V[] = "flip-v";
+const char QCameraParameters::FLIP_MODE_H[] = "flip-h";
+const char QCameraParameters::FLIP_MODE_VH[] = "flip-vh";
+
+const char QCameraParameters::CDS_MODE_OFF[] = "off";
+const char QCameraParameters::CDS_MODE_ON[] = "on";
+const char QCameraParameters::CDS_MODE_AUTO[] = "auto";
+
+// Values for video rotation settings.
+const char QCameraParameters::VIDEO_ROTATION_0[] = "0";
+const char QCameraParameters::VIDEO_ROTATION_90[] = "90";
+const char QCameraParameters::VIDEO_ROTATION_180[] = "180";
+const char QCameraParameters::VIDEO_ROTATION_270[] = "270";
+
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES[] = "manual-focus-modes";
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES[] = "manual-exposure-modes";
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_WB_MODES[] = "manual-wb-modes";
+const char QCameraParameters::KEY_QC_FOCUS_SCALE_MODE[] = "scale-mode";
+const char QCameraParameters::KEY_QC_FOCUS_DIOPTER_MODE[] = "diopter-mode";
+const char QCameraParameters::KEY_QC_ISO_PRIORITY[] = "iso-priority";
+const char QCameraParameters::KEY_QC_EXP_TIME_PRIORITY[] = "exp-time-priority";
+const char QCameraParameters::KEY_QC_USER_SETTING[] = "user-setting";
+const char QCameraParameters::KEY_QC_WB_CCT_MODE[] = "color-temperature";
+const char QCameraParameters::KEY_QC_WB_GAIN_MODE[] = "rbgb-gains";
+const char QCameraParameters::KEY_QC_NOISE_REDUCTION_MODE[] = "noise-reduction-mode";
+const char QCameraParameters::KEY_QC_NOISE_REDUCTION_MODE_VALUES[] = "noise-reduction-mode-values";
+
+#ifdef TARGET_TS_MAKEUP
+const char QCameraParameters::KEY_TS_MAKEUP[] = "tsmakeup";
+const char QCameraParameters::KEY_TS_MAKEUP_WHITEN[] = "tsmakeup_whiten";
+const char QCameraParameters::KEY_TS_MAKEUP_CLEAN[] = "tsmakeup_clean";
+#endif
+
+// Key used to share the HFR batch size with the video encoder.
+const char QCameraParameters::KEY_QC_VIDEO_BATCH_SIZE[] = "video-batch-size";
+
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+const cam_dimension_t QCameraParameters::THUMBNAIL_SIZES_MAP[] = {
+ { 256, 154 }, //1.66233
+ { 240, 160 }, //1.5
+ { 320, 320 }, //1.0
+ { 320, 240 }, //1.33333
+ { 256, 144 }, //1.777778
+ { 240, 144 }, //1.666667
+ { 176, 144 }, //1.222222
+ /*Thumbnail sizes to match portrait picture size aspect ratio*/
+ { 240, 320 }, //to match 480X640 & 240X320 picture size
+ { 144, 176 }, //to match 144X176 picture size
+ { 0, 0 } // required by Android SDK
+};
+
+const QCameraParameters::QCameraMap<cam_auto_exposure_mode_type>
+ QCameraParameters::AUTO_EXPOSURE_MAP[] = {
+ { AUTO_EXPOSURE_FRAME_AVG, CAM_AEC_MODE_FRAME_AVERAGE },
+ { AUTO_EXPOSURE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
+ { AUTO_EXPOSURE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
+ { AUTO_EXPOSURE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
+ { AUTO_EXPOSURE_USER_METERING, CAM_AEC_MODE_USER_METERING },
+ { AUTO_EXPOSURE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
+ { AUTO_EXPOSURE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
+};
+
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+ QCameraParameters::INSTANT_AEC_MODES_MAP[] = {
+ { KEY_QC_INSTANT_AEC_DISABLE, CAM_AEC_NORMAL_CONVERGENCE },
+ { KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+ { KEY_QC_INSTANT_AEC_FAST_AEC, CAM_AEC_FAST_CONVERGENCE },
+};
+
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+ QCameraParameters::INSTANT_CAPTURE_MODES_MAP[] = {
+ { KEY_QC_INSTANT_CAPTURE_DISABLE, CAM_AEC_NORMAL_CONVERGENCE },
+ { KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+ { KEY_QC_INSTANT_CAPTURE_FAST_AEC, CAM_AEC_FAST_CONVERGENCE },
+};
+
+const QCameraParameters::QCameraMap<cam_format_t>
+ QCameraParameters::PREVIEW_FORMATS_MAP[] = {
+ {PIXEL_FORMAT_YUV420SP, CAM_FORMAT_YUV_420_NV21},
+ {PIXEL_FORMAT_YUV420P, CAM_FORMAT_YUV_420_YV12},
+ {PIXEL_FORMAT_YUV420SP_ADRENO, CAM_FORMAT_YUV_420_NV21_ADRENO},
+ {PIXEL_FORMAT_YV12, CAM_FORMAT_YUV_420_YV12},
+ {PIXEL_FORMAT_NV12, CAM_FORMAT_YUV_420_NV12},
+ {QC_PIXEL_FORMAT_NV12_VENUS, CAM_FORMAT_YUV_420_NV12_VENUS}
+};
+
+const QCameraParameters::QCameraMap<cam_format_t>
+ QCameraParameters::PICTURE_TYPES_MAP[] = {
+ {PIXEL_FORMAT_JPEG, CAM_FORMAT_JPEG},
+ {PIXEL_FORMAT_YUV420SP, CAM_FORMAT_YUV_420_NV21},
+ {PIXEL_FORMAT_YUV422SP, CAM_FORMAT_YUV_422_NV16},
+ {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV, CAM_FORMAT_YUV_RAW_8BIT_YUYV},
+ {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU, CAM_FORMAT_YUV_RAW_8BIT_YVYU},
+ {QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY, CAM_FORMAT_YUV_RAW_8BIT_UYVY},
+ {QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY, CAM_FORMAT_YUV_RAW_8BIT_VYUY},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG, CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG, CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB, CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR, CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG, CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG, CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB, CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR, CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG, CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG, CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB, CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR, CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG, CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG, CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB, CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR, CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG, CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG, CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB, CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR, CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG, CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG, CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB, CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR, CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG, CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG, CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB, CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR, CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG, CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG, CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB, CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR, CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB},
+ {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR}
+};
+
+const QCameraParameters::QCameraMap<cam_focus_mode_type>
+ QCameraParameters::FOCUS_MODES_MAP[] = {
+ { FOCUS_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
+ { FOCUS_MODE_INFINITY, CAM_FOCUS_MODE_INFINITY },
+ { FOCUS_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
+ { FOCUS_MODE_FIXED, CAM_FOCUS_MODE_FIXED },
+ { FOCUS_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
+ { FOCUS_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+ { FOCUS_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO },
+ { FOCUS_MODE_MANUAL_POSITION, CAM_FOCUS_MODE_MANUAL},
+};
+
+const QCameraParameters::QCameraMap<cam_effect_mode_type>
+ QCameraParameters::EFFECT_MODES_MAP[] = {
+ { EFFECT_NONE, CAM_EFFECT_MODE_OFF },
+ { EFFECT_MONO, CAM_EFFECT_MODE_MONO },
+ { EFFECT_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
+ { EFFECT_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
+ { EFFECT_SEPIA, CAM_EFFECT_MODE_SEPIA },
+ { EFFECT_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
+ { EFFECT_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+ { EFFECT_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+ { EFFECT_AQUA, CAM_EFFECT_MODE_AQUA },
+ { EFFECT_EMBOSS, CAM_EFFECT_MODE_EMBOSS },
+ { EFFECT_SKETCH, CAM_EFFECT_MODE_SKETCH },
+ { EFFECT_NEON, CAM_EFFECT_MODE_NEON },
+ { EFFECT_BEAUTY, CAM_EFFECT_MODE_BEAUTY }
+};
+
+const QCameraParameters::QCameraMap<cam_scene_mode_type>
+ QCameraParameters::SCENE_MODES_MAP[] = {
+ { SCENE_MODE_AUTO, CAM_SCENE_MODE_OFF },
+ { SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
+ { SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
+ { SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
+ { SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
+ { SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+ { SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
+ { SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
+ { SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
+ { SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
+ { SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
+ { SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
+ { SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
+ { SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
+ { SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
+ { SCENE_MODE_ASD, CAM_SCENE_MODE_AUTO },
+ { SCENE_MODE_BACKLIGHT, CAM_SCENE_MODE_BACKLIGHT },
+ { SCENE_MODE_FLOWERS, CAM_SCENE_MODE_FLOWERS },
+ { SCENE_MODE_AR, CAM_SCENE_MODE_AR },
+ { SCENE_MODE_HDR, CAM_SCENE_MODE_HDR },
+};
+
+const QCameraParameters::QCameraMap<cam_flash_mode_t>
+ QCameraParameters::FLASH_MODES_MAP[] = {
+ { FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
+ { FLASH_MODE_AUTO, CAM_FLASH_MODE_AUTO },
+ { FLASH_MODE_ON, CAM_FLASH_MODE_ON },
+ { FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
+};
+
+const QCameraParameters::QCameraMap<cam_focus_algorithm_type>
+ QCameraParameters::FOCUS_ALGO_MAP[] = {
+ { FOCUS_ALGO_AUTO, CAM_FOCUS_ALGO_AUTO },
+ { FOCUS_ALGO_SPOT_METERING, CAM_FOCUS_ALGO_SPOT },
+ { FOCUS_ALGO_CENTER_WEIGHTED, CAM_FOCUS_ALGO_CENTER_WEIGHTED },
+ { FOCUS_ALGO_FRAME_AVERAGE, CAM_FOCUS_ALGO_AVERAGE }
+};
+
+const QCameraParameters::QCameraMap<cam_wb_mode_type>
+ QCameraParameters::WHITE_BALANCE_MODES_MAP[] = {
+ { WHITE_BALANCE_AUTO, CAM_WB_MODE_AUTO },
+ { WHITE_BALANCE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
+ { WHITE_BALANCE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
+ { WHITE_BALANCE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
+ { WHITE_BALANCE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
+ { WHITE_BALANCE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+ { WHITE_BALANCE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
+ { WHITE_BALANCE_SHADE, CAM_WB_MODE_SHADE },
+ { WHITE_BALANCE_MANUAL, CAM_WB_MODE_MANUAL},
+};
+
+const QCameraParameters::QCameraMap<cam_antibanding_mode_type>
+ QCameraParameters::ANTIBANDING_MODES_MAP[] = {
+ { ANTIBANDING_OFF, CAM_ANTIBANDING_MODE_OFF },
+ { ANTIBANDING_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+ { ANTIBANDING_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+ { ANTIBANDING_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+const QCameraParameters::QCameraMap<cam_iso_mode_type>
+ QCameraParameters::ISO_MODES_MAP[] = {
+ { ISO_AUTO, CAM_ISO_MODE_AUTO },
+ { ISO_HJR, CAM_ISO_MODE_DEBLUR },
+ { ISO_100, CAM_ISO_MODE_100 },
+ { ISO_200, CAM_ISO_MODE_200 },
+ { ISO_400, CAM_ISO_MODE_400 },
+ { ISO_800, CAM_ISO_MODE_800 },
+ { ISO_1600, CAM_ISO_MODE_1600 },
+ { ISO_3200, CAM_ISO_MODE_3200 }
+};
+
+const QCameraParameters::QCameraMap<cam_hfr_mode_t>
+ QCameraParameters::HFR_MODES_MAP[] = {
+ { VIDEO_HFR_OFF, CAM_HFR_MODE_OFF },
+ { VIDEO_HFR_2X, CAM_HFR_MODE_60FPS },
+ { VIDEO_HFR_3X, CAM_HFR_MODE_90FPS },
+ { VIDEO_HFR_4X, CAM_HFR_MODE_120FPS },
+ { VIDEO_HFR_5X, CAM_HFR_MODE_150FPS },
+ { VIDEO_HFR_6X, CAM_HFR_MODE_180FPS },
+ { VIDEO_HFR_7X, CAM_HFR_MODE_210FPS },
+ { VIDEO_HFR_8X, CAM_HFR_MODE_240FPS },
+ { VIDEO_HFR_9X, CAM_HFR_MODE_480FPS }
+};
+
+const QCameraParameters::QCameraMap<cam_bracket_mode>
+ QCameraParameters::BRACKETING_MODES_MAP[] = {
+ { AE_BRACKET_OFF, CAM_EXP_BRACKETING_OFF },
+ { AE_BRACKET, CAM_EXP_BRACKETING_ON }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::ON_OFF_MODES_MAP[] = {
+ { VALUE_OFF, 0 },
+ { VALUE_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::TOUCH_AF_AEC_MODES_MAP[] = {
+ { QCameraParameters::TOUCH_AF_AEC_OFF, 0 },
+ { QCameraParameters::TOUCH_AF_AEC_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::ENABLE_DISABLE_MODES_MAP[] = {
+ { VALUE_ENABLE, 1 },
+ { VALUE_DISABLE, 0 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::DENOISE_ON_OFF_MODES_MAP[] = {
+ { DENOISE_OFF, 0 },
+ { DENOISE_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::TRUE_FALSE_MODES_MAP[] = {
+ { VALUE_FALSE, 0},
+ { VALUE_TRUE, 1}
+};
+
+const QCameraParameters::QCameraMap<cam_flip_t>
+ QCameraParameters::FLIP_MODES_MAP[] = {
+ {FLIP_MODE_OFF, FLIP_NONE},
+ {FLIP_MODE_V, FLIP_V},
+ {FLIP_MODE_H, FLIP_H},
+ {FLIP_MODE_VH, FLIP_V_H}
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::AF_BRACKETING_MODES_MAP[] = {
+ { AF_BRACKET_OFF, 0 },
+ { AF_BRACKET_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::RE_FOCUS_MODES_MAP[] = {
+ { RE_FOCUS_OFF, 0 },
+ { RE_FOCUS_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::CHROMA_FLASH_MODES_MAP[] = {
+ { CHROMA_FLASH_OFF, 0 },
+ { CHROMA_FLASH_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::OPTI_ZOOM_MODES_MAP[] = {
+ { OPTI_ZOOM_OFF, 0 },
+ { OPTI_ZOOM_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::TRUE_PORTRAIT_MODES_MAP[] = {
+ { TRUE_PORTRAIT_OFF, 0 },
+ { TRUE_PORTRAIT_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::STILL_MORE_MODES_MAP[] = {
+ { STILL_MORE_OFF, 0 },
+ { STILL_MORE_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<cam_cds_mode_type_t>
+ QCameraParameters::CDS_MODES_MAP[] = {
+ { CDS_MODE_OFF, CAM_CDS_MODE_OFF },
+ { CDS_MODE_ON, CAM_CDS_MODE_ON },
+ { CDS_MODE_AUTO, CAM_CDS_MODE_AUTO}
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::HDR_MODES_MAP[] = {
+ { HDR_MODE_SENSOR, 0 },
+ { HDR_MODE_MULTI_FRAME, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::VIDEO_ROTATION_MODES_MAP[] = {
+ { VIDEO_ROTATION_0, 0 },
+ { VIDEO_ROTATION_90, 90 },
+ { VIDEO_ROTATION_180, 180 },
+ { VIDEO_ROTATION_270, 270 }
+};
+
+const QCameraParameters::QCameraMap<int>
+ QCameraParameters::NOISE_REDUCTION_MODES_MAP[] = {
+ { VALUE_OFF, 0 },
+ { VALUE_FAST, 1 },
+ { VALUE_HIGH_QUALITY, 2 }
+};
+
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+#define TOTAL_RAM_SIZE_512MB 536870912
+#define PARAM_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
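+// Illustrative note (not part of the original change): the QCameraMap tables
+// above are meant to be scanned linearly with PARAM_MAP_SIZE to translate a
+// parameter string into its HAL enum value. Assuming the QCameraMap template
+// exposes 'desc'/'val' members as declared in QCameraParameters.h, a minimal
+// lookup sketch would be:
+//
+// static int32_t lookupFocusMode(const char *str)
+// {
+//     for (size_t i = 0; i < PARAM_MAP_SIZE(FOCUS_MODES_MAP); i++) {
+//         if (strcmp(str, FOCUS_MODES_MAP[i].desc) == 0) {
+//             return FOCUS_MODES_MAP[i].val;
+//         }
+//     }
+//     return NAME_NOT_FOUND; // status code from utils/Errors.h
+// }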
+
+/*===========================================================================
+ * FUNCTION : isOEMFeat1PropEnabled
+ *
+ * DESCRIPTION: inline function to check, via a system property, whether the
+ * custom OEM feature is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : boolean true/false
+ *==========================================================================*/
+static inline bool isOEMFeat1PropEnabled()
+{
+ char value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.imglib.oemfeat1", value, "0");
+ return atoi(value) > 0 ? true : false;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraParameters
+ *
+ * DESCRIPTION: default constructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters()
+ : CameraParameters(),
+ m_reprocScaleParam(),
+ mCommon(),
+ m_pCapability(NULL),
+ m_pCamOpsTbl(NULL),
+ m_pParamHeap(NULL),
+ m_pParamBuf(NULL),
+ m_pRelCamSyncHeap(NULL),
+ m_pRelCamSyncBuf(NULL),
+ m_bFrameSyncEnabled(false),
+ mIsType(IS_TYPE_NONE),
+ mIsTypePreview(IS_TYPE_NONE),
+ m_bZslMode(false),
+ m_bZslMode_new(false),
+ m_bForceZslMode(false),
+ m_bRecordingHint(false),
+ m_bRecordingHint_new(false),
+ m_bHistogramEnabled(false),
+ m_bLongshotEnabled(false),
+ m_nFaceProcMask(0),
+ m_bFaceDetectionOn(0),
+ m_bDebugFps(false),
+ mFocusMode(CAM_FOCUS_MODE_MAX),
+ mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+ mAppPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+ mPictureFormat(CAM_FORMAT_JPEG),
+ m_bNeedRestart(false),
+ m_bNoDisplayMode(false),
+ m_bWNROn(false),
+ m_bTNRPreviewOn(false),
+ m_bTNRVideoOn(false),
+ m_bTNRSnapshotOn(false),
+ m_bInited(false),
+ m_nRetroBurstNum(0),
+ m_nBurstLEDOnPeriod(100),
+ m_bUpdateEffects(false),
+ m_bSceneTransitionAuto(false),
+ m_bPreviewFlipChanged(false),
+ m_bVideoFlipChanged(false),
+ m_bSnapshotFlipChanged(false),
+ m_bFixedFrameRateSet(false),
+ m_bHDREnabled(false),
+ m_bLocalHDREnabled(false),
+ m_bAVTimerEnabled(false),
+ m_bDISEnabled(false),
+ m_MobiMask(0),
+ m_AdjustFPS(NULL),
+ m_bHDR1xFrameEnabled(false),
+ m_HDRSceneEnabled(false),
+ m_bHDRThumbnailProcessNeeded(false),
+ m_bHDR1xExtraBufferNeeded(true),
+ m_bHDROutputCropEnabled(false),
+ m_tempMap(),
+ m_bAFBracketingOn(false),
+ m_bReFocusOn(false),
+ m_bChromaFlashOn(false),
+ m_bOptiZoomOn(false),
+ m_bSceneSelection(false),
+ m_SelectedScene(CAM_SCENE_MODE_MAX),
+ m_bSeeMoreOn(false),
+ m_bStillMoreOn(false),
+ m_bHighQualityNoiseReductionMode(false),
+ m_bHfrMode(false),
+ m_bSensorHDREnabled(false),
+ m_bRdiMode(false),
+ m_bSecureMode(false),
+ m_bAeBracketingEnabled(false),
+ mFlashValue(CAM_FLASH_MODE_OFF),
+ mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+ mHfrMode(CAM_HFR_MODE_OFF),
+ m_bHDRModeSensor(true),
+ mOfflineRAW(false),
+ m_bTruePortraitOn(false),
+ m_bIsLowMemoryDevice(false),
+ mCds_mode(CAM_CDS_MODE_OFF),
+ m_LLCaptureEnabled(FALSE),
+ m_LowLightLevel(CAM_LOW_LIGHT_OFF),
+ m_bLtmForSeeMoreEnabled(false),
+ m_expTime(0),
+ m_isoValue(0),
+ m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
+ m_dualLedCalibration(0),
+ m_bInstantAEC(false),
+ m_bInstantCapture(false),
+ mAecFrameBound(0),
+ mAecSkipDisplayFrameBound(0),
+ m_bQuadraCfa(false)
+{
+ char value[PROPERTY_VALUE_MAX];
+ // TODO: may move to parameter instead of sysprop
+ property_get("persist.debug.sf.showfps", value, "0");
+ m_bDebugFps = atoi(value) > 0 ? true : false;
+
+ // Thermal mode is configured through a system property because a system
+ // property applies to all applications, while parameters only apply to a
+ // specific app.
+ property_get("persist.camera.thermal.mode", value, "fps");
+ if (!strcmp(value, "frameskip")) {
+ m_ThermalMode = QCAMERA_THERMAL_ADJUST_FRAMESKIP;
+ } else {
+ if (strcmp(value, "fps"))
+ LOGW("Invalid camera thermal mode %s", value);
+ m_ThermalMode = QCAMERA_THERMAL_ADJUST_FPS;
+ }
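+ // Note (illustrative, not part of the original change): this property can be
+ // switched from a shell for testing, e.g.
+ //     adb shell setprop persist.camera.thermal.mode frameskip
+ // and is read here at QCameraParameters construction time.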
+
+ memset(value, 0, sizeof(value));
+ // As per power/quality evaluation, LTM is enabled by default in the
+ // SeeMore/StillMore use case to improve quality, since the power impact is minimal
+ property_get("persist.camera.ltmforseemore", value, "1");
+ m_bLtmForSeeMoreEnabled = atoi(value);
+
+ memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+ memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+ memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+ memset(&m_stillmore_config, 0, sizeof(cam_still_more_t));
+ memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+ memset(&m_relCamSyncInfo, 0, sizeof(cam_sync_related_sensors_event_info_t));
+ mTotalPPCount = 1;
+ mZoomLevel = 0;
+ mParmZoomLevel = 0;
+ mCurPPCount = 0;
+ mBufBatchCnt = 0;
+ mRotation = 0;
+ mJpegRotation = 0;
+ mVideoBatchSize = 0;
+ m_bOEMFeatEnabled = isOEMFeat1PropEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraParameters
+ *
+ * DESCRIPTION: constructor of QCameraParameters
+ *
+ * PARAMETERS :
+ * @params : parameters in string
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters(const String8 &params)
+ : CameraParameters(params),
+ m_reprocScaleParam(),
+ m_pCapability(NULL),
+ m_pCamOpsTbl(NULL),
+ m_pParamHeap(NULL),
+ m_pParamBuf(NULL),
+ m_pRelCamSyncHeap(NULL),
+ m_pRelCamSyncBuf(NULL),
+ m_bFrameSyncEnabled(false),
+ m_bZslMode(false),
+ m_bZslMode_new(false),
+ m_bForceZslMode(false),
+ m_bRecordingHint(false),
+ m_bRecordingHint_new(false),
+ m_bHistogramEnabled(false),
+ m_bLongshotEnabled(false),
+ m_nFaceProcMask(0),
+ m_bDebugFps(false),
+ mFocusMode(CAM_FOCUS_MODE_MAX),
+ mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+ mAppPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+ mPictureFormat(CAM_FORMAT_JPEG),
+ m_bNeedRestart(false),
+ m_bNoDisplayMode(false),
+ m_bWNROn(false),
+ m_bTNRPreviewOn(false),
+ m_bTNRVideoOn(false),
+ m_bTNRSnapshotOn(false),
+ m_bInited(false),
+ m_nRetroBurstNum(0),
+ m_nBurstLEDOnPeriod(100),
+ m_bPreviewFlipChanged(false),
+ m_bVideoFlipChanged(false),
+ m_bSnapshotFlipChanged(false),
+ m_bFixedFrameRateSet(false),
+ m_bHDREnabled(false),
+ m_bLocalHDREnabled(false),
+ m_bAVTimerEnabled(false),
+ m_AdjustFPS(NULL),
+ m_bHDR1xFrameEnabled(false),
+ m_HDRSceneEnabled(false),
+ m_bHDRThumbnailProcessNeeded(false),
+ m_bHDR1xExtraBufferNeeded(true),
+ m_bHDROutputCropEnabled(false),
+ m_tempMap(),
+ m_bAFBracketingOn(false),
+ m_bReFocusOn(false),
+ m_bChromaFlashOn(false),
+ m_bOptiZoomOn(false),
+ m_bSceneSelection(false),
+ m_SelectedScene(CAM_SCENE_MODE_MAX),
+ m_bSeeMoreOn(false),
+ m_bStillMoreOn(false),
+ m_bHighQualityNoiseReductionMode(false),
+ m_bHfrMode(false),
+ m_bSensorHDREnabled(false),
+ m_bRdiMode(false),
+ m_bSecureMode(false),
+ m_bAeBracketingEnabled(false),
+ mFlashValue(CAM_FLASH_MODE_OFF),
+ mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+ mHfrMode(CAM_HFR_MODE_OFF),
+ m_bHDRModeSensor(true),
+ mOfflineRAW(false),
+ m_bTruePortraitOn(false),
+ m_bIsLowMemoryDevice(false),
+ mCds_mode(CAM_CDS_MODE_OFF),
+ mParmEffect(CAM_EFFECT_MODE_OFF),
+ m_LLCaptureEnabled(FALSE),
+ m_LowLightLevel(CAM_LOW_LIGHT_OFF),
+ m_bLtmForSeeMoreEnabled(false),
+ m_expTime(0),
+ m_isoValue(0),
+ m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
+ m_dualLedCalibration(0),
+ m_bInstantAEC(false),
+ m_bInstantCapture(false),
+ mAecFrameBound(0),
+ mAecSkipDisplayFrameBound(0),
+ m_bQuadraCfa(false)
+{
+ memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+ memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+ memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+ memset(&m_stillmore_config, 0, sizeof(cam_still_more_t));
+ memset(&m_relCamSyncInfo, 0, sizeof(cam_sync_related_sensors_event_info_t));
+ mTotalPPCount = 0;
+ mZoomLevel = 0;
+ mParmZoomLevel = 0;
+ mCurPPCount = 0;
+ mRotation = 0;
+ mJpegRotation = 0;
+ mBufBatchCnt = 0;
+ mVideoBatchSize = 0;
+ m_bOEMFeatEnabled = isOEMFeat1PropEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraParameters
+ *
+ * DESCRIPTION: destructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraParameters::~QCameraParameters()
+{
+ deinit();
+}
+
+/*===========================================================================
+ * FUNCTION : createSizesString
+ *
+ * DESCRIPTION: create a string containing an array of dimensions
+ *
+ * PARAMETERS :
+ * @sizes : array of dimensions
+ * @len : size of dimension array
+ *
+ * RETURN : string obj
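+ * e.g. {640x480, 1280x720} is rendered as "640x480,1280x720"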
+ *==========================================================================*/
+String8 QCameraParameters::createSizesString(const cam_dimension_t *sizes, size_t len)
+{
+ String8 str;
+ char buffer[32];
+
+ if (len > 0) {
+ snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height);
+ str.append(buffer);
+ }
+ for (size_t i = 1; i < len; i++) {
+ snprintf(buffer, sizeof(buffer), ",%dx%d",
+ sizes[i].width, sizes[i].height);
+ str.append(buffer);
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createValuesString
+ *
+ * DESCRIPTION: create a string containing the values from the map that match
+ * entries in the input values array
+ *
+ * PARAMETERS :
+ * @values : array of values
+ * @len : size of values array
+ * @map : map contains the mapping between values and enums
+ * @map_len : size of the map
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+template <typename valuesType, class mapType> String8 createValuesString(
+ const valuesType *values, size_t len, const mapType *map, size_t map_len)
+{
+ String8 str;
+ int count = 0;
+
+ for (size_t i = 0; i < len; i++ ) {
+ for (size_t j = 0; j < map_len; j ++)
+ if (map[j].val == values[i]) {
+ if (NULL != map[j].desc) {
+ if (count > 0) {
+ str.append(",");
+ }
+ str.append(map[j].desc);
+ count++;
+ break; //loop j
+ }
+ }
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createValuesStringFromMap
+ *
+ * DESCRIPTION: create a string containing the values directly from the map
+ *
+ * PARAMETERS :
+ * @map : map contains the mapping between values and enums
+ * @map_len : size of the map
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+template <class mapType> String8 createValuesStringFromMap(
+ const mapType *map, size_t map_len)
+{
+ String8 str;
+
+ for (size_t i = 0; i < map_len; i++) {
+ if (NULL != map[i].desc) {
+ if (i > 0) {
+ str.append(",");
+ }
+ str.append(map[i].desc);
+ }
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createZoomRatioValuesString
+ *
+ * DESCRIPTION: create a string containing an array of zoom ratio values
+ *
+ * PARAMETERS :
+ * @zoomRatios : array of zoom ratios
+ * @length : size of the array
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createZoomRatioValuesString(uint32_t *zoomRatios,
+ size_t length)
+{
+ String8 str;
+ char buffer[32] = {0};
+
+ if(length > 0){
+ snprintf(buffer, sizeof(buffer), "%d", zoomRatios[0]);
+ str.append(buffer);
+ }
+
+ for (size_t i = 1; i < length; i++) {
+ memset(buffer, 0, sizeof(buffer));
+ snprintf(buffer, sizeof(buffer), ",%d", zoomRatios[i]);
+ str.append(buffer);
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createHfrValuesString
+ *
+ * DESCRIPTION: create a string containing the HFR values from the map that
+ * match entries in the input HFR info array
+ *
+ * PARAMETERS :
+ * @values : array of hfr info
+ * @len : size of the array
+ * @map : map of hfr string value and enum
+ * @map_len : size of map
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrValuesString(const cam_hfr_info_t *values,
+ size_t len, const QCameraMap<cam_hfr_mode_t> *map, size_t map_len)
+{
+ String8 str;
+ int count = 0;
+
+ //Create HFR supported size string.
+ for (size_t i = 0; i < len; i++ ) {
+ for (size_t j = 0; j < map_len; j ++) {
+ if (map[j].val == (int)values[i].mode) {
+ if (NULL != map[j].desc) {
+ if (count > 0) {
+ str.append(",");
+ }
+ str.append(map[j].desc);
+ count++;
+ break; //loop j
+ }
+ }
+ }
+ }
+ if (count > 0) {
+ str.append(",");
+ }
+ str.append(VIDEO_HFR_OFF);
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createHfrSizesString
+ *
+ * DESCRIPTION: create a string containing an array of HFR sizes
+ *
+ * PARAMETERS :
+ * @values : array of hfr info
+ * @len : size of the array
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrSizesString(const cam_hfr_info_t *values, size_t len)
+{
+ String8 str;
+ char buffer[32];
+
+ if (len > 0) {
+ snprintf(buffer, sizeof(buffer), "%dx%d",
+ values[0].dim[0].width, values[0].dim[0].height);
+ str.append(buffer);
+ }
+ for (size_t i = 1; i < len; i++) {
+ snprintf(buffer, sizeof(buffer), ",%dx%d",
+ values[i].dim[0].width, values[i].dim[0].height);
+ str.append(buffer);
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : createFpsString
+ *
+ * DESCRIPTION: create a string containing the list of supported FPS rates
+ *
+ * PARAMETERS :
+ * @fps : default fps range
+ *
+ * RETURN : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createFpsString(cam_fps_range_t &fps)
+{
+ char buffer[32];
+ String8 fpsValues;
+
+ int min_fps = int(fps.min_fps);
+ int max_fps = int(fps.max_fps);
+
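+ // Round the float bounds inward so every listed integer fps stays within
+ // the supported range, e.g. [7.5, 30.0] becomes "8,9,...,30"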
+ if (min_fps < fps.min_fps){
+ min_fps++;
+ }
+ if (max_fps > fps.max_fps) {
+ max_fps--;
+ }
+ if (min_fps <= max_fps) {
+ snprintf(buffer, sizeof(buffer), "%d", min_fps);
+ fpsValues.append(buffer);
+ }
+
+ for (int i = min_fps+1; i <= max_fps; i++) {
+ snprintf(buffer, sizeof(buffer), ",%d", i);
+ fpsValues.append(buffer);
+ }
+
+ return fpsValues;
+}
+
+/*===========================================================================
+ * FUNCTION : createFpsRangeString
+ *
+ * DESCRIPTION: create a string containing an array of FPS ranges
+ *
+ * PARAMETERS :
+ * @fps : array of fps ranges
+ * @len : size of the array
+ * @default_fps_index : reference to index of default fps range
+ *
+ * RETURN : string obj
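+ * e.g. {[7.5,30.0], [30.0,30.0]} is rendered as "(7500,30000),(30000,30000)"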
+ *==========================================================================*/
+String8 QCameraParameters::createFpsRangeString(const cam_fps_range_t* fps,
+ size_t len, int &default_fps_index)
+{
+ String8 str;
+ char buffer[32];
+ int max_range = 0;
+ int min_fps, max_fps;
+
+ if (len > 0) {
+ min_fps = int(fps[0].min_fps * 1000);
+ max_fps = int(fps[0].max_fps * 1000);
+ max_range = max_fps - min_fps;
+ default_fps_index = 0;
+ snprintf(buffer, sizeof(buffer), "(%d,%d)", min_fps, max_fps);
+ str.append(buffer);
+ }
+ for (size_t i = 1; i < len; i++) {
+ min_fps = int(fps[i].min_fps * 1000);
+ max_fps = int(fps[i].max_fps * 1000);
+ if (max_range < (max_fps - min_fps)) {
+ max_range = max_fps - min_fps;
+ default_fps_index = (int)i;
+ }
+ snprintf(buffer, sizeof(buffer), ",(%d,%d)", min_fps, max_fps);
+ str.append(buffer);
+ }
+ return str;
+}
+
+/*===========================================================================
+ * FUNCTION : lookupAttr
+ *
+ * DESCRIPTION: lookup a value by its name
+ *
+ * PARAMETERS :
+ * @arr : map containing <name, value> pairs
+ * @len : size of the map
+ * @name : name to be looked up
+ *
+ * RETURN : valid value if found
+ * NAME_NOT_FOUND if not found
+ *==========================================================================*/
+template <class mapType> int lookupAttr(const mapType *arr,
+ size_t len, const char *name)
+{
+ if (name) {
+ for (size_t i = 0; i < len; i++) {
+ if (!strcmp(arr[i].desc, name))
+ return arr[i].val;
+ }
+ }
+ return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION : lookupNameByValue
+ *
+ * DESCRIPTION: lookup a name by its value
+ *
+ * PARAMETERS :
+ * @arr : map containing <name, value> pairs
+ * @len : size of the map
+ * @value : value to be looked up
+ *
+ * RETURN : name str or NULL if not found
+ *==========================================================================*/
+template <class mapType> const char *lookupNameByValue(const mapType *arr,
+ size_t len, int value)
+{
+ for (size_t i = 0; i < len; i++) {
+ if (arr[i].val == value) {
+ return arr[i].desc;
+ }
+ }
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : setPreviewSize
+ *
+ * DESCRIPTION: set preview size from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewSize(const QCameraParameters& params)
+{
+ int width = 0, height = 0;
+ int old_width = 0, old_height = 0;
+ params.getPreviewSize(&width, &height);
+ CameraParameters::getPreviewSize(&old_width, &old_height);
+
+ // Validate the preview size
+ for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+ if (width == m_pCapability->preview_sizes_tbl[i].width
+ && height == m_pCapability->preview_sizes_tbl[i].height) {
+ // check if need to restart preview in case of preview size change
+ if (width != old_width || height != old_height) {
+ LOGI("Requested preview size %d x %d", width, height);
+ m_bNeedRestart = true;
+ }
+ // set the new value
+ CameraParameters::setPreviewSize(width, height);
+ return NO_ERROR;
+ }
+ }
+ if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+ char prop[PROPERTY_VALUE_MAX];
+ // set prop to configure aux preview size
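+ // e.g. "adb shell setprop persist.camera.aux.preview.size 1280x720"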
+ property_get("persist.camera.aux.preview.size", prop, "0");
+ parse_pair(prop, &width, &height, 'x', NULL);
+ bool foundMatch = false;
+ for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+ if (width == m_pCapability->preview_sizes_tbl[i].width &&
+ height == m_pCapability->preview_sizes_tbl[i].height) {
+ foundMatch = true;
+ }
+ }
+ if (!foundMatch) {
+ width = m_pCapability->preview_sizes_tbl[0].width;
+ height = m_pCapability->preview_sizes_tbl[0].height;
+ }
+ // check if need to restart preview in case of preview size change
+ if (width != old_width || height != old_height) {
+ m_bNeedRestart = true;
+ }
+ CameraParameters::setPreviewSize(width, height);
+ LOGH("Secondary Camera: preview size %d x %d", width, height);
+ return NO_ERROR;
+ }
+
+ LOGE("Invalid preview size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setPictureSize
+ *
+ * DESCRIPTION: set picture size from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureSize(const QCameraParameters& params)
+{
+ int width, height;
+ params.getPictureSize(&width, &height);
+ int old_width, old_height;
+ CameraParameters::getPictureSize(&old_width, &old_height);
+
+ // Validate the picture size
+ if(!m_reprocScaleParam.isScaleEnabled()){
+ for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+ if (width == m_pCapability->picture_sizes_tbl[i].width
+ && height == m_pCapability->picture_sizes_tbl[i].height) {
+ // check if need to restart preview in case of picture size change
+ if ((m_bZslMode || m_bRecordingHint) &&
+ (width != old_width || height != old_height)) {
+ LOGI("Requested picture size %d x %d", width, height);
+ m_bNeedRestart = true;
+ }
+ // set the new value
+ CameraParameters::setPictureSize(width, height);
+ // Update View angles based on Picture Aspect ratio
+ updateViewAngles();
+ return NO_ERROR;
+ }
+ }
+ }else{
+ //should use scaled picture size table to validate
+ if(m_reprocScaleParam.setValidatePicSize(width, height) == NO_ERROR){
+ // check if need to restart preview in case of picture size change
+ if ((m_bZslMode || m_bRecordingHint) &&
+ (width != old_width || height != old_height)) {
+ m_bNeedRestart = true;
+ }
+ // set the new value
+ char val[32];
+ snprintf(val, sizeof(val), "%dx%d", width, height);
+ updateParamEntry(KEY_PICTURE_SIZE, val);
+ LOGH("%s", val);
+ // Update View angles based on Picture Aspect ratio
+ updateViewAngles();
+ return NO_ERROR;
+ }
+ }
+ if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+ char prop[PROPERTY_VALUE_MAX];
+ // set prop to configure aux picture size
+ property_get("persist.camera.aux.picture.size", prop, "0");
+ parse_pair(prop, &width, &height, 'x', NULL);
+ bool foundMatch = false;
+ for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+ if (width == m_pCapability->picture_sizes_tbl[i].width &&
+ height == m_pCapability->picture_sizes_tbl[i].height) {
+ foundMatch = true;
+ }
+ }
+ if (!foundMatch) {
+ width = m_pCapability->picture_sizes_tbl[0].width;
+ height = m_pCapability->picture_sizes_tbl[0].height;
+ }
+ // check if need to restart preview in case of picture size change
+ if (width != old_width || height != old_height) {
+ m_bNeedRestart = true;
+ }
+ char val[32];
+ snprintf(val, sizeof(val), "%dx%d", width, height);
+ set(KEY_PICTURE_SIZE, val);
+ LOGH("Secondary Camera: picture size %s", val);
+ return NO_ERROR;
+ }
+ LOGE("Invalid picture size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : updateViewAngles
+ *
+ * DESCRIPTION: Update the horizontal & vertical view angles based on the
+ * current picture aspect ratio relative to the maximum picture aspect ratio
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::updateViewAngles()
+{
+ double stillAspectRatio, maxPictureAspectRatio;
+ int stillWidth, stillHeight, maxWidth, maxHeight;
+ // The crop factors from the full sensor array to the still picture crop region
+ double horizCropFactor = 1.f,vertCropFactor = 1.f;
+ float horizViewAngle, vertViewAngle, maxHfov, maxVfov;
+
+ // Get current Picture & max Snapshot sizes
+ getPictureSize(&stillWidth, &stillHeight);
+ maxWidth = m_pCapability->picture_sizes_tbl[0].width;
+ maxHeight = m_pCapability->picture_sizes_tbl[0].height;
+
+ // Get default maximum FOV from corresponding sensor driver
+ maxHfov = m_pCapability->hor_view_angle;
+ maxVfov = m_pCapability->ver_view_angle;
+
+ stillAspectRatio = (double)stillWidth/stillHeight;
+ maxPictureAspectRatio = (double)maxWidth/maxHeight;
+ LOGD("Stillwidth: %d, height: %d", stillWidth, stillHeight);
+ LOGD("Max width: %d, height: %d", maxWidth, maxHeight);
+ LOGD("still aspect: %f, Max Pic Aspect: %f",
+ stillAspectRatio, maxPictureAspectRatio);
+
+ // crop as per the Maximum Snapshot aspect ratio
+ if (stillAspectRatio < maxPictureAspectRatio)
+ horizCropFactor = stillAspectRatio/maxPictureAspectRatio;
+ else
+ vertCropFactor = maxPictureAspectRatio/stillAspectRatio;
+
+ LOGD("horizCropFactor %f, vertCropFactor %f",
+ horizCropFactor, vertCropFactor);
+
+ // Now derive the final FOVs from the field-of-view formula, i.e.
+ // angle of view = 2 * arctangent ( d / 2f )
+ // where d is the physical sensor dimension of interest, and f is
+ // the focal length. This only applies to rectilinear sensors, for focusing
+ // at distances >> f, etc.
+ // Here tan(maxFov/2) stands in for d/2f of the full-size crop, and the
+ // crop factor scales it down to the current picture crop region.
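+ // Illustrative example (assumed numbers): with maxHfov = 64 deg and a 4:3
+ // still on a 16:9 full-size sensor, horizCropFactor = (4/3)/(16/9) = 0.75,
+ // so horizViewAngle = 2*atan(0.75*tan(32 deg)) ~= 50.2 deg.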
+ horizViewAngle = (180/PI)*2*atan(horizCropFactor*tan((maxHfov/2)*(PI/180)));
+ vertViewAngle = (180/PI)*2*atan(vertCropFactor*tan((maxVfov/2)*(PI/180)));
+
+ setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizViewAngle);
+ setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertViewAngle);
+ LOGH("Final horizViewAngle %f, vertViewAngle %f",
+ horizViewAngle, vertViewAngle);
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoSize
+ *
+ * DESCRIPTION: set video size from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoSize(const QCameraParameters& params)
+{
+ const char *str= NULL;
+ int width, height;
+ str = params.get(KEY_VIDEO_SIZE);
+ int old_width, old_height;
+ CameraParameters::getVideoSize(&old_width, &old_height);
+ if(!str) {
+ //If application didn't set this parameter string, use the values from
+ //getPreviewSize() as video dimensions.
+ params.getPreviewSize(&width, &height);
+ LOGW("No Record Size requested, use the preview dimensions");
+ } else {
+ params.getVideoSize(&width, &height);
+ }
+
+ // Validate the video size
+ for (size_t i = 0; i < m_pCapability->video_sizes_tbl_cnt; ++i) {
+ if (width == m_pCapability->video_sizes_tbl[i].width
+ && height == m_pCapability->video_sizes_tbl[i].height) {
+ // check if need to restart preview in case of video size change
+ if (m_bRecordingHint &&
+ (width != old_width || height != old_height)) {
+ m_bNeedRestart = true;
+ }
+
+ // set the new value
+ LOGH("Requested video size %d x %d", width, height);
+ CameraParameters::setVideoSize(width, height);
+ return NO_ERROR;
+ }
+ }
+ if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+ // Set the default video size for secondary camera
+ width = m_pCapability->video_sizes_tbl[0].width;
+ height = m_pCapability->video_sizes_tbl[0].height;
+ // check if need to restart preview in case of video size change
+ if (width != old_width || height != old_height) {
+ m_bNeedRestart = true;
+ }
+
+ CameraParameters::setVideoSize(width, height);
+ LOGH("Secondary Camera: video size %d x %d",
+ width, height);
+ return NO_ERROR;
+ }
+
+ LOGE("Error !! Invalid video size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : getLiveSnapshotSize
+ *
+ * DESCRIPTION: get live snapshot size
+ *
+ * PARAMETERS :
+ * @dim : updated with the liveshot size
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::getLiveSnapshotSize(cam_dimension_t &dim)
+{
+ if(is4k2kVideoResolution()) {
+ // We support maximum 8M liveshot @4K2K video resolution
+ cam_dimension_t resolution = {0, 0};
+ CameraParameters::getVideoSize(&resolution.width, &resolution.height);
+ if((m_LiveSnapshotSize.width > resolution.width) ||
+ (m_LiveSnapshotSize.height > resolution.height)) {
+ m_LiveSnapshotSize.width = resolution.width;
+ m_LiveSnapshotSize.height = resolution.height;
+ }
+ }
+ dim = m_LiveSnapshotSize;
+ LOGH("w x h: %d x %d", dim.width, dim.height);
+}
+
+/*===========================================================================
+ * FUNCTION : setLiveSnapshotSize
+ *
+ * DESCRIPTION: set live snapshot size
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLiveSnapshotSize(const QCameraParameters& params)
+{
+ char value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.opt.livepic", value, "1");
+ bool useOptimal = atoi(value) > 0 ? true : false;
+ bool vHdrOn;
+ int32_t liveSnapWidth = 0, liveSnapHeight = 0;
+ // use picture size from user setting
+ params.getPictureSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+
+ size_t livesnapshot_sizes_tbl_cnt =
+ m_pCapability->livesnapshot_sizes_tbl_cnt;
+ cam_dimension_t *livesnapshot_sizes_tbl =
+ &m_pCapability->livesnapshot_sizes_tbl[0];
+
+ if(is4k2kVideoResolution()) {
+ // We support maximum 8M liveshot @4K2K video resolution
+ cam_dimension_t resolution = {0, 0};
+ CameraParameters::getVideoSize(&resolution.width, &resolution.height);
+ if((m_LiveSnapshotSize.width > resolution.width) ||
+ (m_LiveSnapshotSize.height > resolution.height)) {
+ m_LiveSnapshotSize.width = resolution.width;
+ m_LiveSnapshotSize.height = resolution.height;
+ }
+ }
+
+ // check if HFR is enabled
+ const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+ cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
+ const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+ const char *vhdrStr = params.get(KEY_QC_VIDEO_HDR);
+ vHdrOn = (vhdrStr != NULL && (0 == strcmp(vhdrStr,"on"))) ? true : false;
+ if (vHdrOn) {
+ livesnapshot_sizes_tbl_cnt = m_pCapability->vhdr_livesnapshot_sizes_tbl_cnt;
+ livesnapshot_sizes_tbl = &m_pCapability->vhdr_livesnapshot_sizes_tbl[0];
+ }
+ if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+ int32_t hsr = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+ if ((hsr != NAME_NOT_FOUND) && (hsr > CAM_HFR_MODE_OFF)) {
+ // if HSR is enabled, change live snapshot size
+ for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+ if (m_pCapability->hfr_tbl[i].mode == hsr) {
+ livesnapshot_sizes_tbl_cnt =
+ m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+ livesnapshot_sizes_tbl =
+ &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+ hfrMode = m_pCapability->hfr_tbl[i].mode;
+ break;
+ }
+ }
+ }
+ } else if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+ int32_t hfr = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+ if ((hfr != NAME_NOT_FOUND) && (hfr > CAM_HFR_MODE_OFF)) {
+ // if HFR is enabled, change live snapshot size
+ for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+ if (m_pCapability->hfr_tbl[i].mode == hfr) {
+ livesnapshot_sizes_tbl_cnt =
+ m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+ livesnapshot_sizes_tbl =
+ &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+ hfrMode = m_pCapability->hfr_tbl[i].mode;
+ break;
+ }
+ }
+ }
+ }
+
+ if (useOptimal || hfrMode != CAM_HFR_MODE_OFF || vHdrOn) {
+ bool found = false;
+
+ // first check if picture size is within the list of supported sizes
+ for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+ if (m_LiveSnapshotSize.width == livesnapshot_sizes_tbl[i].width &&
+ m_LiveSnapshotSize.height == livesnapshot_sizes_tbl[i].height) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ // use optimal live snapshot size from supported list,
+ // that has same preview aspect ratio
+ int width = 0, height = 0;
+ params.getPreviewSize(&width, &height);
+
+ double previewAspectRatio = (double)width / height;
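+ // e.g. a 1920x1080 preview (ratio ~1.78) picks the first livesnapshot
+ // entry whose ratio is within ASPECT_TOLERANCE of 1.78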
+ for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+ double ratio = (double)livesnapshot_sizes_tbl[i].width /
+ livesnapshot_sizes_tbl[i].height;
+ if (fabs(previewAspectRatio - ratio) <= ASPECT_TOLERANCE) {
+ m_LiveSnapshotSize = livesnapshot_sizes_tbl[i];
+ found = true;
+ break;
+ }
+ }
+
+ if (!found && ((hfrMode != CAM_HFR_MODE_OFF) || vHdrOn)) {
+ // Cannot find matching aspect ratio from supported live snapshot list
+ // choose the max dim from preview and video size
+ LOGD("Cannot find matching aspect ratio, choose max of preview or video size");
+ params.getVideoSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+ if (m_LiveSnapshotSize.width < width && m_LiveSnapshotSize.height < height) {
+ m_LiveSnapshotSize.width = width;
+ m_LiveSnapshotSize.height = height;
+ }
+ }
+ }
+ }
+ //To read liveshot resolution from setprop instead of matching aspect ratio.
+ //The setprop resolution format should be WxH.
+ //e.g: adb shell setprop persist.camera.liveshot.size 1280x720
+ memset(value, 0, PROPERTY_VALUE_MAX);
+ property_get("persist.camera.liveshot.size", value, "");
+ if (strlen(value) > 0) {
+ char *saveptr = NULL;
+ char *token = strtok_r(value, "x", &saveptr);
+ if (token != NULL) {
+ liveSnapWidth = atoi(token);
+ }
+ token = strtok_r(NULL, "x", &saveptr);
+ if (token != NULL) {
+ liveSnapHeight = atoi(token);
+ }
+ if ((liveSnapWidth!=0) && (liveSnapHeight!=0)) {
+ for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+ if (liveSnapWidth == m_pCapability->picture_sizes_tbl[i].width
+ && liveSnapHeight == m_pCapability->picture_sizes_tbl[i].height) {
+ m_LiveSnapshotSize.width = liveSnapWidth;
+ m_LiveSnapshotSize.height = liveSnapHeight;
+ break;
+ }
+ }
+ }
+ }
+ LOGH("live snapshot size %d x %d",
+ m_LiveSnapshotSize.width, m_LiveSnapshotSize.height);
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setRawSize
+ *
+ * DESCRIPTION: set raw size
+ *
+ * PARAMETERS :
+ * @dim : raw capture dimension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRawSize(cam_dimension_t &dim)
+{
+ m_rawSize = dim;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setPreviewFormat
+ *
+ * DESCRIPTION: set preview format from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFormat(const QCameraParameters& params)
+{
+ const char *str = params.getPreviewFormat();
+ int32_t previewFormat = lookupAttr(PREVIEW_FORMATS_MAP,
+ PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP), str);
+ if (previewFormat != NAME_NOT_FOUND) {
+ if (isUBWCEnabled()) {
+ char prop[PROPERTY_VALUE_MAX];
+ int pFormat;
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.preview.ubwc", prop, "1");
+
+ pFormat = atoi(prop);
+ if (pFormat == 1) {
+ mPreviewFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
+ mAppPreviewFormat = (cam_format_t)previewFormat;
+ } else {
+ mPreviewFormat = (cam_format_t)previewFormat;
+ mAppPreviewFormat = (cam_format_t)previewFormat;
+ }
+ } else {
+ mPreviewFormat = (cam_format_t)previewFormat;
+ mAppPreviewFormat = (cam_format_t)previewFormat;
+ }
+ CameraParameters::setPreviewFormat(str);
+ LOGH("format %d\n", mPreviewFormat);
+ return NO_ERROR;
+ }
+ LOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setPictureFormat
+ *
+ * DESCRIPTION: set picture format from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureFormat(const QCameraParameters& params)
+{
+ const char *str = params.getPictureFormat();
+ int32_t pictureFormat = lookupAttr(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), str);
+ if (pictureFormat != NAME_NOT_FOUND) {
+ mPictureFormat = pictureFormat;
+
+ CameraParameters::setPictureFormat(str);
+ LOGH("format %d\n", mPictureFormat);
+ return NO_ERROR;
+ }
+ LOGE("Invalid picture format value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegThumbnailSize
+ *
+ * DESCRIPTION: set jpeg thumbnail size from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegThumbnailSize(const QCameraParameters& params)
+{
+ int width = params.getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+ int height = params.getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ LOGD("requested jpeg thumbnail size %d x %d", width, height);
+ int sizes_cnt = sizeof(THUMBNAIL_SIZES_MAP) / sizeof(cam_dimension_t);
+ // Validate thumbnail size
+ for (int i = 0; i < sizes_cnt; i++) {
+ if (width == THUMBNAIL_SIZES_MAP[i].width &&
+ height == THUMBNAIL_SIZES_MAP[i].height) {
+ set(KEY_JPEG_THUMBNAIL_WIDTH, width);
+ set(KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ return NO_ERROR;
+ }
+ }
+ LOGE("error: setting jpeg thumbnail size (%d, %d)", width, height);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setBurstLEDOnPeriod
+ *
+ * DESCRIPTION: set burst LED on period
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBurstLEDOnPeriod(const QCameraParameters& params)
+{
+ int nBurstLEDOnPeriod = params.getInt(KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD);
+ //Check if the LED ON period is within limits
+ if ((nBurstLEDOnPeriod <= 0) || (nBurstLEDOnPeriod > 800)) {
+ // if burst led on period is not set in parameters,
+ // read from sys prop
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.led.on.period", prop, "0");
+ nBurstLEDOnPeriod = atoi(prop);
+ if (nBurstLEDOnPeriod <= 0) {
+ nBurstLEDOnPeriod = 300;
+ }
+ }
+
+ set(KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD, nBurstLEDOnPeriod);
+ m_nBurstLEDOnPeriod = nBurstLEDOnPeriod;
+ LOGH("Burst LED on period %u", m_nBurstLEDOnPeriod);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BURST_LED_ON_PERIOD,
+ (uint32_t)nBurstLEDOnPeriod)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setRetroActiveBurstNum
+ *
+ * DESCRIPTION: set retro active burst num
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRetroActiveBurstNum(
+ const QCameraParameters& params)
+{
+ int32_t nBurstNum = params.getInt(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER);
+ LOGH("m_nRetroBurstNum = %d", m_nRetroBurstNum);
+ if (nBurstNum <= 0) {
+ // if burst number is not set in parameters,
+ // read from sys prop
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.retro.number", prop, "0");
+ nBurstNum = atoi(prop);
+ if (nBurstNum < 0) {
+ nBurstNum = 0;
+ }
+ }
+
+ set(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER, nBurstNum);
+
+ m_nRetroBurstNum = nBurstNum;
+ LOGH("m_nRetroBurstNum = %d", m_nRetroBurstNum);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegQuality
+ *
+ * DESCRIPTION: set jpeg encoding quality from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegQuality(const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+ int quality = params.getInt(KEY_JPEG_QUALITY);
+ if (quality >= 0 && quality <= 100) {
+ set(KEY_JPEG_QUALITY, quality);
+ } else {
+ LOGE("Invalid jpeg quality=%d", quality);
+ rc = BAD_VALUE;
+ }
+
+ quality = params.getInt(KEY_JPEG_THUMBNAIL_QUALITY);
+ if (quality >= 0 && quality <= 100) {
+ set(KEY_JPEG_THUMBNAIL_QUALITY, quality);
+ } else {
+ LOGE("Invalid jpeg thumbnail quality=%d", quality);
+ rc = BAD_VALUE;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setOrientation
+ *
+ * DESCRIPTION: set orientation from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOrientation(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_ORIENTATION);
+
+ if (str != NULL) {
+ if (strcmp(str, portrait) == 0 || strcmp(str, landscape) == 0) {
+ // Camera service needs this to decide if the preview frames and raw
+ // pictures should be rotated.
+ set(KEY_QC_ORIENTATION, str);
+ } else {
+ LOGE("Invalid orientation value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_AUTO_EXPOSURE);
+ const char *prev_str = get(KEY_QC_AUTO_EXPOSURE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setAutoExposure(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS range from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(const QCameraParameters& params)
+{
+ int minFps,maxFps;
+ int prevMinFps, prevMaxFps, vidMinFps, vidMaxFps;
+ int rc = NO_ERROR;
+ bool found = false, updateNeeded = false;
+
+ CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+ params.getPreviewFpsRange(&minFps, &maxFps);
+
+ LOGH("FpsRange Values:(%d, %d)", prevMinFps, prevMaxFps);
+ LOGH("Requested FpsRange Values:(%d, %d)", minFps, maxFps);
+
+ //first check if we need to change fps because of HFR mode change
+ updateNeeded = UpdateHFRFrameRate(params);
+ if (updateNeeded) {
+ m_bNeedRestart = true;
+ rc = setHighFrameRate(mHfrMode);
+ if (rc != NO_ERROR) goto end;
+ }
+ LOGH("UpdateHFRFrameRate %d", updateNeeded);
+
+ vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+ vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+
+ if(minFps == prevMinFps && maxFps == prevMaxFps) {
+ if ( m_bFixedFrameRateSet ) {
+ minFps = params.getPreviewFrameRate() * 1000;
+ maxFps = params.getPreviewFrameRate() * 1000;
+ m_bFixedFrameRateSet = false;
+ } else if (!updateNeeded) {
+ LOGH("No change in FpsRange");
+ rc = NO_ERROR;
+ goto end;
+ }
+ }
+ for(size_t i = 0; i < m_pCapability->fps_ranges_tbl_cnt; i++) {
+ // if the value is in the supported list
+ if (minFps >= m_pCapability->fps_ranges_tbl[i].min_fps * 1000 &&
+ maxFps <= m_pCapability->fps_ranges_tbl[i].max_fps * 1000) {
+ found = true;
+ LOGH("FPS i=%d : minFps = %d, maxFps = %d"
+ " vidMinFps = %d, vidMaxFps = %d",
+ i, minFps, maxFps,
+ (int)m_hfrFpsRange.video_min_fps,
+ (int)m_hfrFpsRange.video_max_fps);
+ if ((0.0f >= m_hfrFpsRange.video_min_fps) ||
+ (0.0f >= m_hfrFpsRange.video_max_fps)) {
+ vidMinFps = minFps;
+ vidMaxFps = maxFps;
+ }
+ else {
+ vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+ vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+ }
+
+ setPreviewFpsRange(minFps, maxFps, vidMinFps, vidMaxFps);
+ break;
+ }
+ }
+ if(found == false){
+ LOGE("error: FPS range value not supported");
+ rc = BAD_VALUE;
+ }
+end:
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : UpdateHFRFrameRate
+ *
+ * DESCRIPTION: set preview FPS range based on HFR setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : bool true/false
+ * true - if HAL needs to overwrite the FPS range set by the app, false otherwise.
+ *==========================================================================*/
+
+bool QCameraParameters::UpdateHFRFrameRate(const QCameraParameters& params)
+{
+ bool updateNeeded = false;
+ int min_fps, max_fps;
+ int32_t hfrMode = CAM_HFR_MODE_OFF;
+ int32_t newHfrMode = CAM_HFR_MODE_OFF;
+
+ int parm_minfps,parm_maxfps;
+ int prevMinFps, prevMaxFps;
+ CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+ params.getPreviewFpsRange(&parm_minfps, &parm_maxfps);
+ LOGH("CameraParameters - : minFps = %d, maxFps = %d ",
+ prevMinFps, prevMaxFps);
+ LOGH("Requested params - : minFps = %d, maxFps = %d ",
+ parm_minfps, parm_maxfps);
+
+ const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+ const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+ const char *prev_hfrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+ const char *prev_hsrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+ if ((hfrStr != NULL) && (prev_hfrStr != NULL) && strcmp(hfrStr, prev_hfrStr)) {
+ updateParamEntry(KEY_QC_VIDEO_HIGH_FRAME_RATE, hfrStr);
+ }
+
+ if ((hsrStr != NULL) && (prev_hsrStr != NULL) && strcmp(hsrStr, prev_hsrStr)) {
+ updateParamEntry(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, hsrStr);
+
+ }
+
+ // check if HFR is enabled
+ if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+ hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+ if (NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+ }
+ // check if HSR is enabled
+ else if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+ hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+ if (NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+ }
+ LOGH("prevHfrMode - %d, currentHfrMode = %d ",
+ mHfrMode, newHfrMode);
+
+ if (mHfrMode != newHfrMode) {
+ updateNeeded = true;
+ mHfrMode = newHfrMode;
+ switch (mHfrMode) {
+ case CAM_HFR_MODE_60FPS:
+ min_fps = 60000;
+ max_fps = 60000;
+ break;
+ case CAM_HFR_MODE_90FPS:
+ min_fps = 90000;
+ max_fps = 90000;
+ break;
+ case CAM_HFR_MODE_120FPS:
+ min_fps = 120000;
+ max_fps = 120000;
+ break;
+ case CAM_HFR_MODE_150FPS:
+ min_fps = 150000;
+ max_fps = 150000;
+ break;
+ case CAM_HFR_MODE_180FPS:
+ min_fps = 180000;
+ max_fps = 180000;
+ break;
+ case CAM_HFR_MODE_210FPS:
+ min_fps = 210000;
+ max_fps = 210000;
+ break;
+ case CAM_HFR_MODE_240FPS:
+ min_fps = 240000;
+ max_fps = 240000;
+ break;
+ case CAM_HFR_MODE_480FPS:
+ min_fps = 480000;
+ max_fps = 480000;
+ break;
+ case CAM_HFR_MODE_OFF:
+ default:
+ // Set Video Fps to zero
+ min_fps = 0;
+ max_fps = 0;
+ break;
+ }
+ m_hfrFpsRange.video_min_fps = (float)min_fps;
+ m_hfrFpsRange.video_max_fps = (float)max_fps;
+
+ LOGH("HFR mode (%d) Set video FPS : minFps = %d, maxFps = %d ",
+ mHfrMode, min_fps, max_fps);
+ }
+
+ // Remember if HFR mode is ON
+ if ((mHfrMode > CAM_HFR_MODE_OFF) && (mHfrMode < CAM_HFR_MODE_MAX)) {
+ LOGH("HFR mode is ON");
+ m_bHfrMode = true;
+ } else {
+ m_hfrFpsRange.video_min_fps = 0;
+ m_hfrFpsRange.video_max_fps = 0;
+ m_bHfrMode = false;
+ LOGH("HFR mode is OFF");
+ }
+ m_hfrFpsRange.min_fps = (float)parm_minfps;
+ m_hfrFpsRange.max_fps = (float)parm_maxfps;
+
+ if (m_bHfrMode && (mHfrMode > CAM_HFR_MODE_120FPS)
+ && (parm_maxfps != 0)) {
+ //Configure buffer batch count to use batch mode for higher fps
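+ // e.g. HFR 240 with a 30fps preview range gives 240000/30000 = 8 frames per batch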
+ setBufBatchCount((int8_t)(m_hfrFpsRange.video_max_fps / parm_maxfps));
+ } else {
+ //Reset batch count and update KEY for encoder
+ setBufBatchCount(0);
+ }
+ return updateNeeded;
+}
+
+/*===========================================================================
+ * FUNCTION : setPreviewFrameRate
+ *
+ * DESCRIPTION: set preview frame rate from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFrameRate(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_PREVIEW_FRAME_RATE);
+ const char *prev_str = get(KEY_PREVIEW_FRAME_RATE);
+
+ if ( str ) {
+ if ( prev_str &&
+ strcmp(str, prev_str)) {
+ LOGD("Requested Fixed Frame Rate %s", str);
+ updateParamEntry(KEY_PREVIEW_FRAME_RATE, str);
+ m_bFixedFrameRateSet = true;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setEffect
+ *
+ * DESCRIPTION: set effect value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_EFFECT);
+ const char *prev_str = get(KEY_EFFECT);
+
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.effect", prop, "none");
+
+ if (strcmp(prop, "none")) {
+ if ((prev_str == NULL) ||
+ (strcmp(prop, prev_str) != 0) ||
+ (m_bUpdateEffects == true)) {
+ m_bUpdateEffects = false;
+ return setEffect(prop);
+ }
+ } else if (str != NULL) {
+ if ((prev_str == NULL) ||
+ (strcmp(str, prev_str) != 0) ||
+ (m_bUpdateEffects == true)) {
+ m_bUpdateEffects = false;
+ return setEffect(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusMode
+ *
+ * DESCRIPTION: set focus mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_FOCUS_MODE);
+ const char *prev_str = get(KEY_FOCUS_MODE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setFocusMode(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusPosition
+ *
+ * DESCRIPTION: set focus position from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusPosition(const QCameraParameters& params)
+{
+ const char *focus_str = params.get(KEY_FOCUS_MODE);
+ const char *prev_focus_str = get(KEY_FOCUS_MODE);
+
+ if (NULL == focus_str) {
+ return NO_ERROR;
+ }
+
+ LOGD("current focus mode: %s", focus_str);
+ if (strcmp(focus_str, FOCUS_MODE_MANUAL_POSITION)) {
+ LOGH(", dont set focus pos to back-end!");
+ return NO_ERROR;
+ }
+
+ const char *pos = params.get(KEY_QC_MANUAL_FOCUS_POSITION);
+ const char *prev_pos = get(KEY_QC_MANUAL_FOCUS_POSITION);
+ const char *type = params.get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+ const char *prev_type = get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+
+ if ((pos != NULL) && (type != NULL) && (focus_str != NULL)) {
+ if (prev_pos == NULL || (strcmp(pos, prev_pos) != 0) ||
+ prev_type == NULL || (strcmp(type, prev_type) != 0) ||
+ prev_focus_str == NULL || (strcmp(focus_str, prev_focus_str) != 0)) {
+ return setFocusPosition(type, pos);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setBrightness
+ *
+ * DESCRIPTION: set brightness control value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(const QCameraParameters& params)
+{
+ int currentBrightness = getInt(KEY_QC_BRIGHTNESS);
+ int brightness = params.getInt(KEY_QC_BRIGHTNESS);
+
+ if(params.get(KEY_QC_BRIGHTNESS) == NULL) {
+ LOGH("Brigtness not set by App ");
+ return NO_ERROR;
+ }
+ if (currentBrightness != brightness) {
+ if (brightness >= m_pCapability->brightness_ctrl.min_value &&
+ brightness <= m_pCapability->brightness_ctrl.max_value) {
+ LOGD("new brightness value : %d ", brightness);
+ return setBrightness(brightness);
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ brightness,
+ m_pCapability->brightness_ctrl.min_value,
+ m_pCapability->brightness_ctrl.max_value);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No brightness value changed.");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getBrightness
+ *
+ * DESCRIPTION: get brightness control value from user setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : current brightness value
+ *==========================================================================*/
+int QCameraParameters::getBrightness()
+{
+ return getInt(KEY_QC_BRIGHTNESS);
+}
+
+/*===========================================================================
+ * FUNCTION : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(const QCameraParameters& params)
+{
+ int sharpness = params.getInt(KEY_QC_SHARPNESS);
+ int prev_sharp = getInt(KEY_QC_SHARPNESS);
+
+ if(params.get(KEY_QC_SHARPNESS) == NULL) {
+ LOGH("Sharpness not set by App ");
+ return NO_ERROR;
+ }
+ if (prev_sharp != sharpness) {
+ if((sharpness >= m_pCapability->sharpness_ctrl.min_value) &&
+ (sharpness <= m_pCapability->sharpness_ctrl.max_value)) {
+ LOGD("new sharpness value : %d ", sharpness);
+ return setSharpness(sharpness);
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ sharpness,
+ m_pCapability->sharpness_ctrl.min_value,
+ m_pCapability->sharpness_ctrl.max_value);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No value change in sharpness");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement factor from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(const QCameraParameters& params)
+{
+ int sceFactor = params.getInt(KEY_QC_SCE_FACTOR);
+ int prev_sceFactor = getInt(KEY_QC_SCE_FACTOR);
+
+ if(params.get(KEY_QC_SCE_FACTOR) == NULL) {
+ LOGH("Skintone enhancement not set by App ");
+ return NO_ERROR;
+ }
+ if (prev_sceFactor != sceFactor) {
+ if((sceFactor >= m_pCapability->sce_ctrl.min_value) &&
+ (sceFactor <= m_pCapability->sce_ctrl.max_value)) {
+ LOGD("new Skintone Enhancement value : %d ", sceFactor);
+ return setSkinToneEnhancement(sceFactor);
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ sceFactor,
+ m_pCapability->sce_ctrl.min_value,
+ m_pCapability->sce_ctrl.max_value);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No value change in skintone enhancement factor");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setSaturation
+ *
+ * DESCRIPTION: set saturation control value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(const QCameraParameters& params)
+{
+ int saturation = params.getInt(KEY_QC_SATURATION);
+ int prev_sat = getInt(KEY_QC_SATURATION);
+
+ if(params.get(KEY_QC_SATURATION) == NULL) {
+ LOGH("Saturation not set by App ");
+ return NO_ERROR;
+ }
+ if (prev_sat != saturation) {
+ if((saturation >= m_pCapability->saturation_ctrl.min_value) &&
+ (saturation <= m_pCapability->saturation_ctrl.max_value)) {
+ LOGD("new saturation value : %d ", saturation);
+ return setSaturation(saturation);
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ saturation,
+ m_pCapability->saturation_ctrl.min_value,
+ m_pCapability->saturation_ctrl.max_value);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No value change in saturation factor");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setContrast
+ *
+ * DESCRIPTION: set contrast control value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(const QCameraParameters& params)
+{
+ int contrast = params.getInt(KEY_QC_CONTRAST);
+ int prev_contrast = getInt(KEY_QC_CONTRAST);
+
+ if(params.get(KEY_QC_CONTRAST) == NULL) {
+ LOGH("Contrast not set by App ");
+ return NO_ERROR;
+ }
+ if (prev_contrast != contrast) {
+ if((contrast >= m_pCapability->contrast_ctrl.min_value) &&
+ (contrast <= m_pCapability->contrast_ctrl.max_value)) {
+ LOGD("new contrast value : %d ", contrast);
+ int32_t rc = setContrast(contrast);
+ return rc;
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ contrast,
+ m_pCapability->contrast_ctrl.min_value,
+ m_pCapability->contrast_ctrl.max_value);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No value change in contrast");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(const QCameraParameters & params)
+{
+ int expComp = params.getInt(KEY_EXPOSURE_COMPENSATION);
+ int prev_expComp = getInt(KEY_EXPOSURE_COMPENSATION);
+
+ if(params.get(KEY_EXPOSURE_COMPENSATION) == NULL) {
+ LOGH("Exposure compensation not set by App ");
+ return NO_ERROR;
+ }
+ if (prev_expComp != expComp) {
+ if((expComp >= m_pCapability->exposure_compensation_min) &&
+ (expComp <= m_pCapability->exposure_compensation_max)) {
+ LOGD("new Exposure Compensation value : %d ", expComp);
+ return setExposureCompensation(expComp);
+ } else {
+ LOGE("invalid value %d out of (%d, %d)",
+ expComp,
+ m_pCapability->exposure_compensation_min,
+ m_pCapability->exposure_compensation_max);
+ return BAD_VALUE;
+ }
+ } else {
+ LOGD("No value change in Exposure Compensation");
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_WHITE_BALANCE);
+ const char *prev_str = get(KEY_WHITE_BALANCE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setWhiteBalance(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setManualWhiteBalance
+ *
+ * DESCRIPTION: set manual white balance from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setManualWhiteBalance(const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+ const char *wb_str = params.get(KEY_WHITE_BALANCE);
+ const char *prev_wb_str = get(KEY_WHITE_BALANCE);
+ LOGD("current wb mode: %s", wb_str);
+
+ if (wb_str != NULL) {
+ if (strcmp(wb_str, WHITE_BALANCE_MANUAL)) {
+ LOGD("dont set cct to back-end.");
+ return NO_ERROR;
+ }
+ }
+
+ const char *value = params.get(KEY_QC_MANUAL_WB_VALUE);
+ const char *prev_value = get(KEY_QC_MANUAL_WB_VALUE);
+ const char *type = params.get(KEY_QC_MANUAL_WB_TYPE);
+ const char *prev_type = get(KEY_QC_MANUAL_WB_TYPE);
+
+ if ((value != NULL) && (type != NULL) && (wb_str != NULL)) {
+ if (prev_value == NULL || (strcmp(value, prev_value) != 0) ||
+ prev_type == NULL || (strcmp(type, prev_type) != 0) ||
+ prev_wb_str == NULL || (strcmp(wb_str, prev_wb_str) != 0)) {
+ updateParamEntry(KEY_QC_MANUAL_WB_TYPE, type);
+ updateParamEntry(KEY_QC_MANUAL_WB_VALUE, value);
+ int32_t wb_type = atoi(type);
+ if (wb_type == CAM_MANUAL_WB_MODE_CCT) {
+ rc = setWBManualCCT(value);
+ } else if (wb_type == CAM_MANUAL_WB_MODE_GAIN) {
+ rc = setManualWBGains(value);
+ } else {
+ rc = BAD_VALUE;
+ }
+ }
+ }
+ return rc;
+}
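+
+/* Flow sketch (illustrative comment only, not part of the control flow): manual
+ * white balance reaches the backend only while KEY_WHITE_BALANCE equals
+ * WHITE_BALANCE_MANUAL; the companion keys then pick the flavour:
+ *
+ *   atoi(KEY_QC_MANUAL_WB_TYPE) == CAM_MANUAL_WB_MODE_CCT  -> setWBManualCCT(value)
+ *   atoi(KEY_QC_MANUAL_WB_TYPE) == CAM_MANUAL_WB_MODE_GAIN -> setManualWBGains(value)
+ *
+ * The encoding of KEY_QC_MANUAL_WB_VALUE (presumably a colour temperature for the
+ * CCT case and a gain list for the GAIN case) is owned by those setters, not here.
+ */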
+
+/*===========================================================================
+ * FUNCTION : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_ANTIBANDING);
+ const char *prev_str = get(KEY_ANTIBANDING);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setAntibanding(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setStatsDebugMask
+ *
+ * DESCRIPTION: read the persist property whose value controls debug
+ *              functionality in the Stats module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStatsDebugMask()
+{
+ uint32_t mask = 0;
+ char value[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.stats.debug.mask", value, "0");
+ mask = (uint32_t)atoi(value);
+
+ LOGH("ctrl mask :%d", mask);
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_STATS_DEBUG_MASK, mask)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
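+
+/* Usage sketch (illustrative): the mask is normally driven from the shell, e.g.
+ *
+ *     adb shell setprop persist.camera.stats.debug.mask 3
+ *
+ * where 3 is only a placeholder value -- the meaningful bit assignments are
+ * defined inside the Stats module. The property is read here with property_get()
+ * and forwarded through CAM_INTF_PARM_STATS_DEBUG_MASK on the next commit.
+ */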
+
+/*===========================================================================
+ * FUNCTION : setPAAF
+ *
+ * DESCRIPTION: read the persist property whose value controls preview
+ *              assisted AF (PAAF) in the Stats module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPAAF()
+{
+ uint32_t paaf = 0;
+ char value[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.stats.af.paaf", value, "1");
+ paaf = (uint32_t)atoi(value);
+
+ LOGH("PAAF is: %s", paaf ? "ON": "OFF");
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_STATS_AF_PAAF, paaf)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_SCENE_DETECT);
+ const char *prev_str = get(KEY_QC_SCENE_DETECT);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setSceneDetect(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_VIDEO_HDR);
+ const char *prev_str = get(KEY_QC_VIDEO_HDR);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setVideoHDR(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setVtEnable
+ *
+ * DESCRIPTION: set VT timestamp enable from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_VT_ENABLE);
+ const char *prev_str = get(KEY_QC_VT_ENABLE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setVtEnable(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_FACE_RECOGNITION);
+ const char *prev_str = get(KEY_QC_FACE_RECOGNITION);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ uint32_t maxFaces = (uint32_t)params.getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+ return setFaceRecognition(str, maxFaces);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setZoom
+ *
+ * DESCRIPTION: set zoom value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(const QCameraParameters& params)
+{
+ if ((m_pCapability->zoom_supported == 0 ||
+ m_pCapability->zoom_ratio_tbl_cnt == 0)) {
+ LOGH("no zoom support");
+ return NO_ERROR;
+ }
+
+ int zoomLevel = params.getInt(KEY_ZOOM);
+ mParmZoomLevel = zoomLevel;
+ if ((zoomLevel < 0) || (zoomLevel >= (int)m_pCapability->zoom_ratio_tbl_cnt)) {
+ LOGE("invalid value %d out of (%d, %d)",
+ zoomLevel,
+ 0, m_pCapability->zoom_ratio_tbl_cnt-1);
+ return BAD_VALUE;
+ }
+
+ int prevZoomLevel = getInt(KEY_ZOOM);
+ if (prevZoomLevel == zoomLevel) {
+ LOGD("No value change in zoom %d %d", prevZoomLevel, zoomLevel);
+ return NO_ERROR;
+ }
+
+ return setZoom(zoomLevel);
+}
+
+/*===========================================================================
+ * FUNCTION : setISOValue
+ *
+ * DESCRIPTION: set ISO value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setISOValue(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_ISO_MODE);
+ const char *prev_str = get(KEY_QC_ISO_MODE);
+
+ if(getManualCaptureMode()) {
+ char iso_val[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.iso", iso_val, "");
+ if (strlen(iso_val) > 0) {
+ if (prev_str == NULL ||
+ strcmp(iso_val, prev_str) != 0) {
+ return setISOValue(iso_val);
+ }
+ }
+ } else if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setISOValue(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setContinuousISO
+ *
+ * DESCRIPTION: set continuous ISO value
+ *
+ * PARAMETERS :
+ *   @isoValue : ISO value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContinuousISO(const char *isoValue)
+{
+ char iso[PROPERTY_VALUE_MAX];
+ int32_t continous_iso = 0;
+
+ // Check if continuous ISO is set through setproperty
+ property_get("persist.camera.continuous.iso", iso, "");
+ if (strlen(iso) > 0) {
+ continous_iso = atoi(iso);
+ } else {
+ continous_iso = atoi(isoValue);
+ }
+
+ if ((continous_iso >= 0) &&
+ (continous_iso <= m_pCapability->sensitivity_range.max_sensitivity)) {
+ LOGH("Setting continuous ISO value %d", continous_iso);
+ updateParamEntry(KEY_QC_CONTINUOUS_ISO, isoValue);
+
+ cam_intf_parm_manual_3a_t iso_settings;
+ memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+ iso_settings.previewOnly = FALSE;
+ iso_settings.value = continous_iso;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ISO, iso_settings)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ LOGE("Invalid iso value: %d", continous_iso);
+ return BAD_VALUE;
+}
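+
+/* Usage sketch (illustrative): the continuous ISO value can be overridden from
+ * the shell, e.g.
+ *
+ *     adb shell setprop persist.camera.continuous.iso 800
+ *
+ * 800 is just a sample number; any value from 0 up to the sensor's
+ * sensitivity_range.max_sensitivity is accepted. When the property is empty,
+ * the app-supplied isoValue string is used instead.
+ */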
+
+/*===========================================================================
+ * FUNCTION : setExposureTime
+ *
+ * DESCRIPTION: set exposure time from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureTime(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_EXPOSURE_TIME);
+ const char *prev_str = get(KEY_QC_EXPOSURE_TIME);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setExposureTime(str);
+ }
+ } else if(getManualCaptureMode()) {
+ char expTime[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.exposure.time", expTime, "");
+ if (strlen(expTime) > 0) {
+ if (prev_str == NULL ||
+ strcmp(expTime, prev_str) != 0) {
+ return setExposureTime(expTime);
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoRotation(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_VIDEO_ROTATION);
+ if(str != NULL) {
+ int value = lookupAttr(VIDEO_ROTATION_MODES_MAP,
+ PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_VIDEO_ROTATION, str);
+ LOGL("setVideoRotation: %d: ", str, value);
+ } else {
+ LOGE("Invalid rotation value: %d", value);
+ return BAD_VALUE;
+ }
+
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRotation(const QCameraParameters& params)
+{
+ int32_t rotation = params.getInt(KEY_ROTATION);
+ if (rotation != -1) {
+ if (rotation == 0 || rotation == 90 ||
+ rotation == 180 || rotation == 270) {
+ set(KEY_ROTATION, rotation);
+
+ ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_META_JPEG_ORIENTATION,
+ rotation);
+ mRotation = rotation;
+ } else {
+ LOGE("Invalid rotation value: %d", rotation);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFlash
+ *
+ * DESCRIPTION: set flash mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_FLASH_MODE);
+ const char *prev_str = get(KEY_FLASH_MODE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setFlash(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_AUTO_EXPOSURE_LOCK);
+ const char *prev_str = get(KEY_AUTO_EXPOSURE_LOCK);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setAecLock(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_AUTO_WHITEBALANCE_LOCK);
+ const char *prev_str = get(KEY_AUTO_WHITEBALANCE_LOCK);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setAwbLock(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAutoHDR
+ *
+ * DESCRIPTION: Enable/disable auto HDR
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoHDR(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_AUTO_HDR_ENABLE);
+ const char *prev_str = get(KEY_QC_AUTO_HDR_ENABLE);
+ char prop[PROPERTY_VALUE_MAX];
+
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.auto.hdr.enable", prop, VALUE_DISABLE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ LOGH("Auto HDR set to: %s", str);
+ return updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, str);
+ }
+ } else {
+ if (prev_str == NULL ||
+ strcmp(prev_str, prop) != 0 ) {
+ LOGH("Auto HDR set to: %s", prop);
+ updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, prop);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION : isAutoHDREnabled
+*
+* DESCRIPTION: Query auto HDR status
+*
+* PARAMETERS : None
+*
+* RETURN : bool true/false
+*==========================================================================*/
+bool QCameraParameters::isAutoHDREnabled()
+{
+ const char *str = get(KEY_QC_AUTO_HDR_ENABLE);
+ if (str != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+ if (value == NAME_NOT_FOUND) {
+ LOGE("Invalid Auto HDR value %s", str);
+ return false;
+ }
+
+ LOGH("Auto HDR status is: %d", value);
+ return value ? true : false;
+ }
+
+ LOGH("Auto HDR status not set!");
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+ const char *prev_str = get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setMCEValue(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setDISValue
+ *
+ * DESCRIPTION: enable/disable DIS from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_DIS);
+ const char *prev_str = get(KEY_QC_DIS);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setDISValue(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_LENSSHADE);
+ const char *prev_str = get(KEY_QC_LENSSHADE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setLensShadeValue(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_FOCUS_AREAS);
+
+ if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
+ // Ignore focus areas for secondary camera
+ LOGH("Ignore focus areas for secondary camera!! ");
+ return NO_ERROR;
+ }
+ if (str != NULL) {
+ int max_num_af_areas = getInt(KEY_MAX_NUM_FOCUS_AREAS);
+ if(max_num_af_areas == 0) {
+ LOGE("max num of AF area is 0, cannot set focus areas");
+ return BAD_VALUE;
+ }
+
+ const char *prev_str = get(KEY_FOCUS_AREAS);
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setFocusAreas(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_METERING_AREAS);
+ if (str != NULL) {
+ int max_num_mtr_areas = getInt(KEY_MAX_NUM_METERING_AREAS);
+ if(max_num_mtr_areas == 0) {
+ LOGE("max num of metering areas is 0, cannot set focus areas");
+ return BAD_VALUE;
+ }
+
+ const char *prev_str = get(KEY_METERING_AREAS);
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0 ||
+ (m_bNeedRestart == true)) {
+ return setMeteringAreas(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSceneMode
+ *
+ * DESCRIPTION: set scene mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_SCENE_MODE);
+ const char *prev_str = get(KEY_SCENE_MODE);
+ LOGH("str - %s, prev_str - %s", str, prev_str);
+
+ // HDR & Recording are mutually exclusive and so disable HDR if recording hint is set
+ if (m_bRecordingHint_new && m_bHDREnabled) {
+ LOGH("Disable the HDR and set it to Auto");
+ str = SCENE_MODE_AUTO;
+ m_bLocalHDREnabled = true;
+ } else if (!m_bRecordingHint_new && m_bLocalHDREnabled) {
+ LOGH("Restore the HDR from Auto scene mode");
+ str = SCENE_MODE_HDR;
+ m_bLocalHDREnabled = false;
+ }
+
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+
+ if(strcmp(str, SCENE_MODE_AUTO) == 0) {
+ m_bSceneTransitionAuto = true;
+ }
+ if (strcmp(str, SCENE_MODE_HDR) == 0) {
+
+ // If HDR is set from client and the feature is not enabled in the backend, ignore it.
+ if (m_bHDRModeSensor && isSupportedSensorHdrSize(params)) {
+ m_bSensorHDREnabled = true;
+ LOGH("Sensor HDR mode Enabled");
+ } else {
+ m_bHDREnabled = true;
+ LOGH("S/W HDR Enabled");
+ }
+ } else {
+ m_bHDREnabled = false;
+ if (m_bSensorHDREnabled) {
+ m_bSensorHDREnabled = false;
+ m_bNeedRestart = true;
+ setSensorSnapshotHDR("off");
+ }
+ }
+
+ if (m_bSensorHDREnabled) {
+ setSensorSnapshotHDR("on");
+ m_bNeedRestart = true;
+ } else if ((m_bHDREnabled) ||
+ ((prev_str != NULL) && (strcmp(prev_str, SCENE_MODE_HDR) == 0))) {
+ LOGH("scene mode changed between HDR and non-HDR, need restart");
+ m_bNeedRestart = true;
+ }
+
+ return setSceneMode(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone auto focus value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_SELECTABLE_ZONE_AF);
+ const char *prev_str = get(KEY_QC_SELECTABLE_ZONE_AF);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setSelectableZoneAf(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const QCameraParameters& params)
+{
+ if (isHDREnabled()) {
+ LOGH("scene mode is HDR, overwrite AE bracket setting to off");
+ return setAEBracket(AE_BRACKET_OFF);
+ }
+
+ const char *expStr = params.get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+ if (NULL != expStr && strlen(expStr) > 0) {
+ set(KEY_QC_CAPTURE_BURST_EXPOSURE, expStr);
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.capture.burst.exposures", prop, "");
+ if (strlen(prop) > 0) {
+ set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+ } else {
+ remove(KEY_QC_CAPTURE_BURST_EXPOSURE);
+ }
+ }
+
+ const char *str = params.get(KEY_QC_AE_BRACKET_HDR);
+ const char *prev_str = get(KEY_QC_AE_BRACKET_HDR);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setAEBracket(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setAFBracket
+ *
+ * DESCRIPTION: set AF bracket from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ (CAM_QCOM_FEATURE_REFOCUS | CAM_QCOM_FEATURE_UBIFOCUS)) == 0) {
+ LOGH("AF Bracketing is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_AF_BRACKET);
+ const char *prev_str = get(KEY_QC_AF_BRACKET);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setAFBracket(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setReFocus
+ *
+ * DESCRIPTION: set refocus from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setReFocus(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ (CAM_QCOM_FEATURE_REFOCUS | CAM_QCOM_FEATURE_UBIFOCUS)) == 0) {
+ LOGD("AF Bracketing is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_RE_FOCUS);
+ const char *prev_str = get(KEY_QC_RE_FOCUS);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setReFocus(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setChromaFlash
+ *
+ * DESCRIPTION: set chroma flash from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_CHROMA_FLASH) == 0) {
+ LOGH("Chroma Flash is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_CHROMA_FLASH);
+ const char *prev_str = get(KEY_QC_CHROMA_FLASH);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setChromaFlash(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setOptiZoom
+ *
+ * DESCRIPTION: set opti zoom from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_OPTIZOOM) == 0){
+ LOGH("Opti Zoom is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_OPTI_ZOOM);
+ const char *prev_str = get(KEY_QC_OPTI_ZOOM);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setOptiZoom(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_TRUEPORTRAIT) == 0) {
+ LOGD("True Portrait is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_TRUE_PORTRAIT);
+ const char *prev_str = get(KEY_QC_TRUE_PORTRAIT);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setTruePortrait(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRMode
+ *
+ * DESCRIPTION: set HDR mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_HDR_MODE);
+ const char *prev_str = get(KEY_QC_HDR_MODE);
+ uint32_t supported_hdr_modes = m_pCapability->qcom_supported_feature_mask &
+ (CAM_QCOM_FEATURE_SENSOR_HDR | CAM_QCOM_FEATURE_HDR);
+
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if ((CAM_QCOM_FEATURE_SENSOR_HDR == supported_hdr_modes) &&
+ (strncmp(str, HDR_MODE_SENSOR, strlen(HDR_MODE_SENSOR)))) {
+ LOGH("Only sensor HDR is supported");
+ return NO_ERROR;
+ } else if ((CAM_QCOM_FEATURE_HDR == supported_hdr_modes) &&
+                (strncmp(str, HDR_MODE_MULTI_FRAME, strlen(HDR_MODE_MULTI_FRAME)))) {
+ LOGH("Only multi frame HDR is supported");
+ return NO_ERROR;
+ } else if (!supported_hdr_modes) {
+ LOGH("HDR is not supported");
+ return NO_ERROR;
+ }
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setHDRMode(str);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRNeed1x
+ *
+ * DESCRIPTION: set HDR need 1x from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRNeed1x(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_HDR_NEED_1X);
+ const char *prev_str = get(KEY_QC_HDR_NEED_1X);
+
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (m_bHDRModeSensor) {
+ LOGH("Only multi frame HDR supports 1x frame");
+ return NO_ERROR;
+ }
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ return setHDRNeed1x(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setQuadraCfaMode
+ *
+ * DESCRIPTION: enable or disable Quadra CFA mode
+ *
+ * PARAMETERS :
+ * @enable : enable: 1; disable: 0
+ *   @initCommit: if configuration list needs to be initialized and committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setQuadraCfaMode(uint32_t enable, bool initCommit) {
+
+ int32_t rc = NO_ERROR;
+
+ if (getQuadraCfa()) {
+ if (enable) {
+ setOfflineRAW(TRUE);
+ } else {
+ setOfflineRAW(FALSE);
+ }
+ if (initCommit) {
+ if (initBatchUpdate(m_pParamBuf) < 0) {
+ LOGE("Failed to initialize group update table");
+ return FAILED_TRANSACTION;
+ }
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_QUADRA_CFA, enable)) {
+ LOGE("Failed to update Quadra CFA mode");
+ return BAD_VALUE;
+ }
+ if (initCommit) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit Quadra CFA mode");
+ return rc;
+ }
+ }
+ }
+ LOGI("Quadra CFA mode %d ", enable);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setQuadraCfa
+ *
+ * DESCRIPTION: set Quadra CFA mode
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setQuadraCfa(const QCameraParameters& params)
+{
+
+ int32_t width = 0,height = 0;
+ bool prev_quadracfa = getQuadraCfa();
+ int32_t rc = NO_ERROR;
+ int32_t value;
+
+ if (!m_pCapability->is_remosaic_lib_present) {
+ LOGD("Quadra CFA mode not supported");
+ return rc;
+ }
+
+    /* Check whether the user-selected picture size exceeds the maximum size the
+    Quadra sensor supports in normal mode. If it does, switch to Quadra CFA mode;
+    otherwise remain in normal ZSL mode. */
+ params.getPictureSize(&width, &height);
+ if (width > m_pCapability->raw_dim[0].width &&
+ height > m_pCapability->raw_dim[0].height) {
+ LOGI("Quadra CFA mode selected");
+ m_bQuadraCfa = TRUE;
+ } else {
+ LOGI("Quadra CFA mode not selected");
+ m_bQuadraCfa = FALSE;
+ }
+ value = m_bQuadraCfa;
+ if (prev_quadracfa == m_bQuadraCfa) {
+ LOGD("No change in Quadra CFA mode");
+ } else {
+ if (m_bZslMode && m_bQuadraCfa) {
+ m_bNeedRestart = TRUE;
+ setZslMode(FALSE);
+ } else {
+ const char *str_val = params.get(KEY_QC_ZSL);
+ int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+ str_val);
+ if (value != NAME_NOT_FOUND && value) {
+ rc = setZslMode(value);
+ // ZSL mode changed, need to restart preview
+ m_bNeedRestart = true;
+ }
+ }
+ }
+ LOGH("Quadra CFA mode = %d", m_bQuadraCfa);
+ return rc;
+}
+/*===========================================================================
+ * FUNCTION : setSeeMore
+ *
+ * DESCRIPTION: set see more (llvd) from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_LLVD) == 0) {
+ LOGD("See more is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_SEE_MORE);
+ const char *prev_str = get(KEY_QC_SEE_MORE);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setSeeMore(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setNoiseReductionMode
+ *
+ * DESCRIPTION: set noise reduction mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoiseReductionMode(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QTI_FEATURE_SW_TNR) == 0) {
+ LOGD("SW TNR is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_NOISE_REDUCTION_MODE);
+ const char *prev_str = get(KEY_QC_NOISE_REDUCTION_MODE);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setNoiseReductionMode(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setStillMore
+ *
+ * DESCRIPTION: set stillmore from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStillMore(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_STILLMORE) == 0) {
+ LOGD("Stillmore is not supported");
+ return NO_ERROR;
+ }
+ const char *str = params.get(KEY_QC_STILL_MORE);
+ const char *prev_str = get(KEY_QC_STILL_MORE);
+ LOGH("str =%s & prev_str =%s", str, prev_str);
+ if (str != NULL) {
+ if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+ m_bNeedRestart = true;
+ return setStillMore(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction setting from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_REDEYE_REDUCTION);
+ const char *prev_str = get(KEY_QC_REDEYE_REDUCTION);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setRedeyeReduction(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setGpsLocation
+ *
+ * DESCRIPTION: set GPS location information from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setGpsLocation(const QCameraParameters& params)
+{
+ const char *method = params.get(KEY_GPS_PROCESSING_METHOD);
+ if (method) {
+ set(KEY_GPS_PROCESSING_METHOD, method);
+ }else {
+ remove(KEY_GPS_PROCESSING_METHOD);
+ }
+
+ const char *latitude = params.get(KEY_GPS_LATITUDE);
+ if (latitude) {
+ set(KEY_GPS_LATITUDE, latitude);
+ }else {
+ remove(KEY_GPS_LATITUDE);
+ }
+
+ const char *latitudeRef = params.get(KEY_QC_GPS_LATITUDE_REF);
+ if (latitudeRef) {
+ set(KEY_QC_GPS_LATITUDE_REF, latitudeRef);
+ }else {
+ remove(KEY_QC_GPS_LATITUDE_REF);
+ }
+
+ const char *longitude = params.get(KEY_GPS_LONGITUDE);
+ if (longitude) {
+ set(KEY_GPS_LONGITUDE, longitude);
+ }else {
+ remove(KEY_GPS_LONGITUDE);
+ }
+
+ const char *longitudeRef = params.get(KEY_QC_GPS_LONGITUDE_REF);
+ if (longitudeRef) {
+ set(KEY_QC_GPS_LONGITUDE_REF, longitudeRef);
+ }else {
+ remove(KEY_QC_GPS_LONGITUDE_REF);
+ }
+
+ const char *altitudeRef = params.get(KEY_QC_GPS_ALTITUDE_REF);
+ if (altitudeRef) {
+ set(KEY_QC_GPS_ALTITUDE_REF, altitudeRef);
+ }else {
+ remove(KEY_QC_GPS_ALTITUDE_REF);
+ }
+
+ const char *altitude = params.get(KEY_GPS_ALTITUDE);
+ if (altitude) {
+ set(KEY_GPS_ALTITUDE, altitude);
+ }else {
+ remove(KEY_GPS_ALTITUDE);
+ }
+
+ const char *status = params.get(KEY_QC_GPS_STATUS);
+ if (status) {
+ set(KEY_QC_GPS_STATUS, status);
+ } else {
+ remove(KEY_QC_GPS_STATUS);
+ }
+
+ const char *timestamp = params.get(KEY_GPS_TIMESTAMP);
+ if (timestamp) {
+ set(KEY_GPS_TIMESTAMP, timestamp);
+ }else {
+ remove(KEY_GPS_TIMESTAMP);
+ }
+ return NO_ERROR;
+}
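+
+/* Refactoring sketch (illustrative only; copyOrClear() is a hypothetical helper
+ * and is not part of this HAL): the repeated copy-or-remove pattern above could
+ * be expressed once as
+ *
+ *   static void copyOrClear(QCameraParameters &dst,
+ *           const QCameraParameters &src, const char *key) {
+ *       const char *val = src.get(key);
+ *       if (val != NULL) {
+ *           dst.set(key, val);    // carry the app-supplied value forward
+ *       } else {
+ *           dst.remove(key);      // drop stale values the app no longer sends
+ *       }
+ *   }
+ *
+ * and called once per GPS key.
+ */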
+
+/*===========================================================================
+ * FUNCTION : setNumOfSnapshot
+ *
+ * DESCRIPTION: set number of snapshot per shutter from user setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNumOfSnapshot()
+{
+ int nBurstNum = 1;
+ int nExpnum = 0;
+
+ const char *bracket_str = get(KEY_QC_AE_BRACKET_HDR);
+ if (bracket_str != NULL && strlen(bracket_str) > 0) {
+ int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+ bracket_str);
+ switch (value) {
+ case CAM_EXP_BRACKETING_ON:
+ {
+ nExpnum = 0;
+ const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+ if ((str_val != NULL) && (strlen(str_val) > 0)) {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+ char *saveptr = NULL;
+ char *token = strtok_r(prop, ",", &saveptr);
+ while (token != NULL) {
+ token = strtok_r(NULL, ",", &saveptr);
+ nExpnum++;
+ }
+ }
+ if (nExpnum == 0) {
+ nExpnum = 1;
+ }
+ }
+ break;
+ default:
+ nExpnum = 1 + getNumOfExtraHDROutBufsIfNeeded();
+ break;
+ }
+ }
+
+ if (isUbiRefocus()) {
+ nBurstNum = m_pCapability->refocus_af_bracketing_need.output_count + 1;
+ }
+
+ LOGH("nBurstNum = %d, nExpnum = %d", nBurstNum, nExpnum);
+ set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, nBurstNum * nExpnum);
+ return NO_ERROR;
+}
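+
+/* Counting sketch (illustrative): the burst exposure string is a comma-separated
+ * list, so the number of exposures equals the number of tokens. A minimal
+ * standalone equivalent of the strtok_r() loop above, using the hypothetical
+ * value "0,-6,+6":
+ *
+ *   char buf[] = "0,-6,+6";
+ *   char *save = NULL;
+ *   int n = 0;
+ *   for (char *tok = strtok_r(buf, ",", &save); tok != NULL;
+ *           tok = strtok_r(NULL, ",", &save)) {
+ *       n++;                      // n == 3 once the loop finishes
+ *   }
+ */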
+
+/*===========================================================================
+ * FUNCTION : setRecordingHint
+ *
+ * DESCRIPTION: set recording hint value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRecordingHint(const QCameraParameters& params)
+{
+ const char * str = params.get(KEY_RECORDING_HINT);
+ const char *prev_str = get(KEY_RECORDING_HINT);
+ if (str != NULL) {
+ if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+ int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+ str);
+ if(value != NAME_NOT_FOUND){
+ updateParamEntry(KEY_RECORDING_HINT, str);
+ setRecordingHintValue(value);
+ if (getFaceDetectionOption() == true) {
+ if (!fdModeInVideo()) {
+ setFaceDetection(value > 0 ? false : true, false);
+ } else {
+ setFaceDetection(true, false);
+ }
+ }
+ if (m_bDISEnabled) {
+ LOGH("Setting DIS value again");
+ setDISValue(VALUE_ENABLE);
+ }
+ return NO_ERROR;
+ } else {
+ LOGE("Invalid recording hint value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setNoDisplayMode
+ *
+ * DESCRIPTION: set no display mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoDisplayMode(const QCameraParameters& params)
+{
+ const char *str_val = params.get(KEY_QC_NO_DISPLAY_MODE);
+ const char *prev_str = get(KEY_QC_NO_DISPLAY_MODE);
+ char prop[PROPERTY_VALUE_MAX];
+ LOGD("str_val: %s, prev_str: %s", str_val, prev_str);
+
+ // Aux Camera Mode, set no display mode
+ if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+ if (!m_bNoDisplayMode) {
+ set(KEY_QC_NO_DISPLAY_MODE, 1);
+ m_bNoDisplayMode = true;
+ m_bNeedRestart = true;
+ }
+ return NO_ERROR;
+ }
+
+ if(str_val && strlen(str_val) > 0) {
+ if (prev_str == NULL || strcmp(str_val, prev_str) != 0) {
+ m_bNoDisplayMode = atoi(str_val);
+ set(KEY_QC_NO_DISPLAY_MODE, str_val);
+ m_bNeedRestart = true;
+ }
+ } else {
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.no-display", prop, "0");
+ m_bNoDisplayMode = atoi(prop);
+ }
+ LOGH("Param m_bNoDisplayMode = %d", m_bNoDisplayMode);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(const QCameraParameters& params)
+{
+ const char *str_val = params.get(KEY_QC_ZSL);
+ const char *prev_val = get(KEY_QC_ZSL);
+ int32_t rc = NO_ERROR;
+
+ if(m_bForceZslMode) {
+ if (!m_bZslMode) {
+ // Force ZSL mode to ON
+ set(KEY_QC_ZSL, VALUE_ON);
+ setZslMode(TRUE);
+ LOGH("ZSL Mode forced to be enabled");
+ }
+ } else if (str_val != NULL) {
+ if (prev_val == NULL || strcmp(str_val, prev_val) != 0) {
+ int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+ str_val);
+ if (value != NAME_NOT_FOUND) {
+ set(KEY_QC_ZSL, str_val);
+ rc = setZslMode(value);
+ // ZSL mode changed, need restart preview
+ m_bNeedRestart = true;
+ } else {
+ LOGE("Invalid ZSL mode value: %s", str_val);
+ rc = BAD_VALUE;
+ }
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode
+ *
+ * PARAMETERS :
+ * @value : ZSL mode value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(bool value)
+{
+ int32_t rc = NO_ERROR;
+ if(m_bForceZslMode) {
+ if (!m_bZslMode) {
+ // Force ZSL mode to ON
+ set(KEY_QC_ZSL, VALUE_ON);
+ m_bZslMode_new = true;
+ m_bZslMode = true;
+ m_bNeedRestart = true;
+
+ int32_t value = m_bForceZslMode;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, value)) {
+ rc = BAD_VALUE;
+ }
+
+ LOGI("ZSL Mode forced to be enabled");
+ }
+ } else {
+ LOGI("ZSL Mode -> %s", m_bZslMode_new ? "Enabled" : "Disabled");
+ m_bZslMode_new = (value > 0)? true : false;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, value)) {
+ rc = BAD_VALUE;
+ }
+ }
+ LOGH("enabled: %d rc = %d", m_bZslMode_new, rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : updateZSLModeValue
+ *
+ * DESCRIPTION: update zsl mode value locally and to daemon
+ *
+ * PARAMETERS :
+ * @value : zsl mode value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateZSLModeValue(bool value)
+{
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ rc = setZslMode(value);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to ZSL value");
+ return rc;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to update recording hint");
+ return rc;
+ }
+
+ return rc;
+}
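+
+/* Pattern sketch (illustrative): updateZSLModeValue() is just the standard
+ * three-step batch protocol wrapped around setZslMode():
+ *
+ *   initBatchUpdate(m_pParamBuf);                            // 1. start a batch
+ *   ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ *           CAM_INTF_PARM_ZSL_MODE, value);                  // 2. stage the entry
+ *   commitSetBatch();                                        // 3. push to backend
+ *
+ * The same protocol is used whenever a parameter must take effect outside the
+ * regular updateParameters()/commitParameters() path.
+ */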
+
+/*===========================================================================
+ * FUNCTION : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_DENOISE);
+ const char *prev_str = get(KEY_QC_DENOISE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setWaveletDenoise(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setTemporalDenoise
+ *
+ * DESCRIPTION: set temporal denoise value from user setting and properties
+ *
+ * PARAMETERS :
+ *   @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTemporalDenoise(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_CPP_TNR) == 0) {
+ LOGH("TNR is not supported");
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(KEY_QC_TNR_MODE);
+ const char *prev_str = get(KEY_QC_TNR_MODE);
+ const char *video_str = params.get(KEY_QC_VIDEO_TNR_MODE);
+ const char *video_prev_str = get(KEY_QC_VIDEO_TNR_MODE);
+ char video_value[PROPERTY_VALUE_MAX];
+ char preview_value[PROPERTY_VALUE_MAX];
+ bool prev_video_tnr = m_bTNRVideoOn;
+ bool prev_preview_tnr = m_bTNRPreviewOn;
+ bool prev_snap_tnr = m_bTNRSnapshotOn;
+
+ char value[PROPERTY_VALUE_MAX];
+ memset(value, 0, sizeof(value));
+ property_get("persist.camera.tnr_cds", value, "0");
+ uint8_t tnr_cds = (uint8_t)atoi(value);
+
+ if (m_bRecordingHint_new == true) {
+ if (video_str) {
+ if ((video_prev_str == NULL) || (strcmp(video_str, video_prev_str) != 0)) {
+ if (!strcmp(video_str, VALUE_ON)) {
+ m_bTNRVideoOn = true;
+ m_bTNRPreviewOn = true;
+ } else {
+ m_bTNRVideoOn = false;
+ m_bTNRPreviewOn = false;
+ }
+ updateParamEntry(KEY_QC_VIDEO_TNR_MODE, video_str);
+ } else {
+ return NO_ERROR;
+ }
+ }
+ } else {
+ if (str) {
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ if (!strcmp(str, VALUE_ON)) {
+ m_bTNRPreviewOn = true;
+ } else {
+ m_bTNRPreviewOn = false;
+ }
+ updateParamEntry(KEY_QC_TNR_MODE, str);
+ } else {
+ return NO_ERROR;
+ }
+ }
+ }
+
+ //Read setprops only if UI is not present or disabled.
+ if ((m_bRecordingHint_new == true)
+ && ((video_str == NULL)
+ || (strcmp(video_str, VALUE_ON)))) {
+ memset(video_value, 0, sizeof(video_value));
+ property_get("persist.camera.tnr.video", video_value, VALUE_OFF);
+ if (!strcmp(video_value, VALUE_ON)) {
+ m_bTNRVideoOn = true;
+ } else {
+ m_bTNRVideoOn = false;
+ }
+ updateParamEntry(KEY_QC_VIDEO_TNR_MODE, video_value);
+
+ memset(preview_value, 0, sizeof(preview_value));
+ property_get("persist.camera.tnr.preview", preview_value, VALUE_OFF);
+ if (!strcmp(preview_value, VALUE_ON)) {
+ m_bTNRPreviewOn = true;
+ } else {
+ m_bTNRPreviewOn = false;
+ }
+ updateParamEntry(KEY_QC_TNR_MODE, preview_value);
+ } else if ((m_bRecordingHint_new != true)
+ && ((str == NULL) || (strcmp(str, VALUE_ON)))) {
+ memset(preview_value, 0, sizeof(preview_value));
+ property_get("persist.camera.tnr.preview", preview_value, VALUE_OFF);
+ if (!strcmp(preview_value, VALUE_ON)) {
+ m_bTNRPreviewOn = true;
+ } else {
+ m_bTNRPreviewOn = false;
+ }
+ updateParamEntry(KEY_QC_TNR_MODE, preview_value);
+ }
+
+ memset(value, 0, sizeof(value));
+ property_get("persist.camera.tnr.snapshot", value, VALUE_OFF);
+ if (!strcmp(value, VALUE_ON)) {
+ m_bTNRSnapshotOn = true;
+ LOGD("TNR enabled for SNAPSHOT stream");
+ } else {
+ m_bTNRSnapshotOn = false;
+ }
+
+ cam_denoise_param_t temp;
+ memset(&temp, 0, sizeof(temp));
+ if (m_bTNRVideoOn || m_bTNRPreviewOn || m_bTNRSnapshotOn) {
+ temp.denoise_enable = 1;
+ temp.process_plates = getDenoiseProcessPlate(
+ CAM_INTF_PARM_TEMPORAL_DENOISE);
+
+ if (!tnr_cds) {
+ int32_t cds_mode = lookupAttr(CDS_MODES_MAP,
+ PARAM_MAP_SIZE(CDS_MODES_MAP), CDS_MODE_OFF);
+
+ if (cds_mode != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_VIDEO_CDS_MODE, CDS_MODE_OFF);
+ if (m_bTNRPreviewOn) {
+ updateParamEntry(KEY_QC_CDS_MODE, CDS_MODE_OFF);
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed CDS MODE to update table");
+ return BAD_VALUE;
+ }
+ LOGD("CDS is set to = %s when TNR is enabled",
+ CDS_MODE_OFF);
+ mCds_mode = cds_mode;
+ } else {
+ LOGE("Invalid argument for video CDS MODE %d",
+ cds_mode);
+ }
+ } else {
+ LOGH("Enabled TNR with CDS");
+ }
+ }
+
+ if ((m_bTNRVideoOn != prev_video_tnr)
+ || (m_bTNRPreviewOn != prev_preview_tnr)
+ || (prev_snap_tnr != m_bTNRSnapshotOn)) {
+ LOGD("TNR enabled = %d, plates = %d",
+ temp.denoise_enable, temp.process_plates);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_TEMPORAL_DENOISE, temp)) {
+ return BAD_VALUE;
+ }
+ }
+
+ return NO_ERROR;
+}
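+
+/* Usage sketch (illustrative): when no UI drives KEY_QC_TNR_MODE or
+ * KEY_QC_VIDEO_TNR_MODE, TNR can be forced through the properties read above
+ * (assuming VALUE_ON expands to "on"):
+ *
+ *     adb shell setprop persist.camera.tnr.preview on
+ *     adb shell setprop persist.camera.tnr.video on
+ *     adb shell setprop persist.camera.tnr.snapshot on
+ *
+ * By default CDS is forced off whenever TNR is enabled; setting
+ * persist.camera.tnr_cds to 1 keeps CDS running alongside TNR.
+ */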
+
+/*===========================================================================
+ * FUNCTION : setCameraMode
+ *
+ * DESCRIPTION: set camera mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCameraMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_CAMERA_MODE);
+ if (str != NULL) {
+ set(KEY_QC_CAMERA_MODE, str);
+ } else {
+ remove(KEY_QC_CAMERA_MODE);
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSceneSelectionMode
+ *
+ * DESCRIPTION: set scene selection mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneSelectionMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_SCENE_SELECTION);
+ const char *prev_str = get(KEY_QC_SCENE_SELECTION);
+ if (NULL != str) {
+ if ((NULL == prev_str) || (strcmp(str, prev_str) != 0)) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+ if (value != NAME_NOT_FOUND) {
+ LOGD("Setting selection value %s", str);
+ if (value && m_bZslMode_new) {
+ updateParamEntry(KEY_QC_SCENE_SELECTION, str);
+ m_bNeedRestart = true;
+ m_bSceneSelection = true;
+ } else if (!value) {
+ updateParamEntry(KEY_QC_SCENE_SELECTION, str);
+ m_bNeedRestart = true;
+ m_bSceneSelection = false;
+ } else {
+ LOGE("Trying to enable scene selection in non ZSL mode!!!");
+ return BAD_VALUE;
+ }
+ } else {
+ LOGE("Trying to configure invalid scene selection value: %s",
+ str);
+ return BAD_VALUE;
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSelectedScene
+ *
+ * DESCRIPTION: select specific scene
+ *
+ * PARAMETERS :
+ * @scene : scene mode
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectedScene(cam_scene_mode_type scene)
+{
+ Mutex::Autolock l(m_SceneSelectLock);
+ m_SelectedScene = scene;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getSelectedScene
+ *
+ * DESCRIPTION: get selected scene
+ *
+ * PARAMETERS :
+ *
+ * RETURN : currently selected scene
+ *==========================================================================*/
+cam_scene_mode_type QCameraParameters::getSelectedScene()
+{
+ Mutex::Autolock l(m_SceneSelectLock);
+ return m_SelectedScene;
+}
+
+/*==========================================================
+ * FUNCTION : setRdiMode
+ *
+ * DESCRIPTION: set Rdi mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *===========================================================*/
+int32_t QCameraParameters::setRdiMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_RDI_MODE);
+ const char *prev_str = get(KEY_QC_RDI_MODE);
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+
+ property_get("persist.camera.rdi.mode", prop, VALUE_DISABLE);
+ if ((str != NULL) && (prev_str == NULL || strcmp(str, prev_str) != 0)) {
+ LOGD("RDI mode set to %s", str);
+ setRdiMode(str);
+ } else if (prev_str == NULL || strcmp(prev_str, prop) != 0 ) {
+ LOGD("RDI mode set to prop: %s", prop);
+ setRdiMode(prop);
+ }
+ return NO_ERROR;
+}
+
+/*==========================================================
+ * FUNCTION : setSecureMode
+ *
+ * DESCRIPTION: set secure mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *===========================================================*/
+
+int32_t QCameraParameters::setSecureMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_SECURE_MODE);
+ const char *prev_str = get(KEY_QC_SECURE_MODE);
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+
+ property_get("persist.camera.secure.mode", prop, VALUE_DISABLE);
+ if ((str != NULL) && (prev_str == NULL || strcmp(str, prev_str) != 0)) {
+ LOGD("Secure mode set to KEY: %s", str);
+ setSecureMode(str);
+ } else if (prev_str == NULL || strcmp(prev_str, prop) != 0 ) {
+ LOGD("Secure mode set to prop: %s", prop);
+ setSecureMode(prop);
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setZslAttributes
+ *
+ * DESCRIPTION: set ZSL related attributes from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslAttributes(const QCameraParameters& params)
+{
+ // TODO: may switch to pure param instead of sysprop
+ char prop[PROPERTY_VALUE_MAX];
+
+ const char *str = params.get(KEY_QC_ZSL_BURST_INTERVAL);
+ if (str != NULL) {
+ set(KEY_QC_ZSL_BURST_INTERVAL, str);
+ } else {
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.zsl.interval", prop, "1");
+ set(KEY_QC_ZSL_BURST_INTERVAL, prop);
+ LOGH("burst interval: %s", prop);
+ }
+
+ str = params.get(KEY_QC_ZSL_BURST_LOOKBACK);
+ if (str != NULL) {
+ set(KEY_QC_ZSL_BURST_LOOKBACK, str);
+ } else {
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.zsl.backlookcnt", prop, "2");
+ uint32_t look_back_cnt = atoi(prop);
+ if (m_bFrameSyncEnabled) {
+ look_back_cnt += EXTRA_FRAME_SYNC_BUFFERS;
+ }
+ set(KEY_QC_ZSL_BURST_LOOKBACK, look_back_cnt);
+ LOGH("look back count: %s", prop);
+ }
+
+ str = params.get(KEY_QC_ZSL_QUEUE_DEPTH);
+ if (str != NULL) {
+ set(KEY_QC_ZSL_QUEUE_DEPTH, str);
+ } else {
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.zsl.queuedepth", prop, "2");
+ uint32_t queue_depth = atoi(prop);
+ if (m_bFrameSyncEnabled) {
+ queue_depth += EXTRA_FRAME_SYNC_BUFFERS;
+ }
+ set(KEY_QC_ZSL_QUEUE_DEPTH, queue_depth);
+ LOGH("queue depth: %s", prop);
+ }
+
+ return NO_ERROR;
+}
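+// Illustrative override (assumes the standard Android setprop tool): the ZSL
+// fallbacks read above can be tuned from a shell before the camera is opened,
+// e.g.
+//     adb shell setprop persist.camera.zsl.backlookcnt 4
+//     adb shell setprop persist.camera.zsl.queuedepth 4
+// EXTRA_FRAME_SYNC_BUFFERS is still added on top when frame sync is enabled.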
+
+/*===========================================================================
+ * FUNCTION : setFlip
+ *
+ * DESCRIPTION: set preview/ video/ picture flip mode from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlip(const QCameraParameters& params)
+{
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) == 0) {
+ LOGH("flip is not supported.");
+ return NO_ERROR;
+ }
+
+ //check preview flip setting
+ const char *str = params.get(KEY_QC_PREVIEW_FLIP);
+ const char *prev_val = get(KEY_QC_PREVIEW_FLIP);
+ if(str != NULL){
+ if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+ int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+ if(value != NAME_NOT_FOUND){
+ set(KEY_QC_PREVIEW_FLIP, str);
+ m_bPreviewFlipChanged = true;
+ }
+ }
+ }
+
+ // check video flip setting
+ str = params.get(KEY_QC_VIDEO_FLIP);
+ prev_val = get(KEY_QC_VIDEO_FLIP);
+ if(str != NULL){
+ if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+ int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+ if(value != NAME_NOT_FOUND){
+ set(KEY_QC_VIDEO_FLIP, str);
+ m_bVideoFlipChanged = true;
+ }
+ }
+ }
+
+ // check picture flip setting
+ str = params.get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+ prev_val = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+ if(str != NULL){
+ if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+ int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+ if(value != NAME_NOT_FOUND){
+ set(KEY_QC_SNAPSHOT_PICTURE_FLIP, str);
+ m_bSnapshotFlipChanged = true;
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
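+// Note: preview, video and snapshot flips are tracked independently above, and
+// only a changed value raises the corresponding m_b*FlipChanged flag, so
+// re-sending an identical flip string is a no-op.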
+
+/*===========================================================================
+ * FUNCTION : setSnapshotFDReq
+ *
+ * DESCRIPTION: set requirement of Face Detection Metadata in Snapshot mode.
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSnapshotFDReq(const QCameraParameters& params)
+{
+ char prop[PROPERTY_VALUE_MAX];
+ const char *str = params.get(KEY_QC_SNAPSHOT_FD_DATA);
+
+ if(str != NULL){
+ set(KEY_QC_SNAPSHOT_FD_DATA, str);
+ }else{
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.snapshot.fd", prop, "0");
+ set(KEY_QC_SNAPSHOT_FD_DATA, prop);
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setMobicat
+ *
+ * DESCRIPTION: set Mobicat on/off.
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMobicat(const QCameraParameters& )
+{
+ char value [PROPERTY_VALUE_MAX];
+ property_get("persist.camera.mobicat", value, "0");
+ int32_t ret = NO_ERROR;
+ uint8_t enableMobi = (uint8_t)atoi(value);
+
+ if (enableMobi) {
+ tune_cmd_t tune_cmd;
+ tune_cmd.type = 2;
+ tune_cmd.module = 0;
+ tune_cmd.value = 1;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SET_VFE_COMMAND, tune_cmd)) {
+ return BAD_VALUE;
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SET_PP_COMMAND, tune_cmd)) {
+ ret = BAD_VALUE;
+ }
+ }
+ m_MobiMask = enableMobi;
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : setLongshotParam
+ *
+ * DESCRIPTION: set Longshot on/off.
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotParam(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_LONG_SHOT);
+ const char *prev_str = get(KEY_QC_LONG_SHOT);
+
+ if (str != NULL) {
+ if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+ set(KEY_QC_LONG_SHOT, str);
+ if (prev_str && !strcmp(str, "off") && !strcmp(prev_str, "on")) {
+ // Restart here to reset the FPS and the number of buffers
+ // required by the single-snapshot use case. This restart happens
+ // when continuous shot changes from on to off. When continuous
+ // shot is turned on, the restart is handled once the actual
+ // longshot command arrives through sendCommand.
+ m_bNeedRestart = true;
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : checkFeatureConcurrency
+ *
+ * DESCRIPTION: check if there is a feature concurrency issue with advanced
+ * camera features
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::checkFeatureConcurrency()
+{
+ int32_t rc = NO_ERROR;
+ uint32_t advancedFeatEnableBit = 0;
+
+ if (isStillMoreEnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_STILLMORE;
+ }
+ if (isHDREnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_HDR;
+ }
+ if (isChromaFlashEnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_CHROMA_FLASH;
+ }
+ if (isUbiFocusEnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_UBIFOCUS;
+ }
+ if (isTruePortraitEnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_TRUEPORTRAIT;
+ }
+ if (isOptiZoomEnabled()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_OPTIZOOM;
+ }
+ if (isUbiRefocus()) {
+ advancedFeatEnableBit |= CAM_QCOM_FEATURE_REFOCUS;
+ }
+
+ if (m_bLongshotEnabled && advancedFeatEnableBit) {
+ LOGE("Failed Longshot mode bit 0x%x",
+ advancedFeatEnableBit);
+ rc = BAD_TYPE;
+ return rc;
+ }
+
+ if(m_bRecordingHint_new) {
+ advancedFeatEnableBit &= ~CAM_QCOM_FEATURE_STILLMORE;
+
+ if (advancedFeatEnableBit) {
+ LOGE("Failed recording mode bit 0x%x",
+ advancedFeatEnableBit);
+ rc = BAD_TYPE;
+ }
+ } else if (m_bZslMode_new) {
+ /* ZSL mode: reject when more than one feature bit is set, i.e. (x & (x - 1)) != 0 */
+ if (advancedFeatEnableBit & (advancedFeatEnableBit - 1)) {
+ LOGE("Failed ZSL mode bit 0x%x", advancedFeatEnableBit);
+ rc = BAD_TYPE;
+ }
+ } else { /* non-ZSL mode */
+ advancedFeatEnableBit &= ~CAM_QCOM_FEATURE_HDR;
+
+ /* non-ZSL mode check if 1 bit is set */
+ if (advancedFeatEnableBit) {
+ LOGE("Failed non-ZSL mode bit 0x%x", advancedFeatEnableBit);
+ rc = BAD_TYPE;
+ }
+ }
+ LOGI("Advance feature enabled 0x%x", advancedFeatEnableBit);
+ return rc;
+}
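+// Example (for illustration): if both CAM_QCOM_FEATURE_HDR and
+// CAM_QCOM_FEATURE_OPTIZOOM are requested in ZSL mode, advancedFeatEnableBit
+// has two bits set, (x & (x - 1)) is non-zero, and the check above fails with
+// BAD_TYPE.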
+
+/*===========================================================================
+ * FUNCTION : updateParameters
+ *
+ * DESCRIPTION: update parameters from user setting
+ *
+ * PARAMETERS :
+ * @p : user setting parameters, flattened into a String8
+ * @needRestart : [output] whether preview needs to restart after the setting changes
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParameters(const String8& p,
+ bool &needRestart)
+{
+ int32_t final_rc = NO_ERROR;
+ int32_t rc;
+ m_bNeedRestart = false;
+ QCameraParameters params(p);
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ rc = BAD_TYPE;
+ goto UPDATE_PARAM_DONE;
+ }
+
+ if ((rc = setPreviewSize(params))) final_rc = rc;
+ if ((rc = setVideoSize(params))) final_rc = rc;
+ if ((rc = setPictureSize(params))) final_rc = rc;
+ if ((rc = setPreviewFormat(params))) final_rc = rc;
+ if ((rc = setPictureFormat(params))) final_rc = rc;
+ if ((rc = setJpegQuality(params))) final_rc = rc;
+ if ((rc = setOrientation(params))) final_rc = rc;
+ if ((rc = setRotation(params))) final_rc = rc;
+ if ((rc = setVideoRotation(params))) final_rc = rc;
+ if ((rc = setNoDisplayMode(params))) final_rc = rc;
+ if ((rc = setZslMode(params))) final_rc = rc;
+ if ((rc = setZslAttributes(params))) final_rc = rc;
+ if ((rc = setCameraMode(params))) final_rc = rc;
+ if ((rc = setSceneSelectionMode(params))) final_rc = rc;
+ if ((rc = setRecordingHint(params))) final_rc = rc;
+ if ((rc = setRdiMode(params))) final_rc = rc;
+ if ((rc = setSecureMode(params))) final_rc = rc;
+ if ((rc = setPreviewFrameRate(params))) final_rc = rc;
+ if ((rc = setPreviewFpsRange(params))) final_rc = rc;
+ if ((rc = setAutoExposure(params))) final_rc = rc;
+ if ((rc = setEffect(params))) final_rc = rc;
+ if ((rc = setBrightness(params))) final_rc = rc;
+ if ((rc = setZoom(params))) final_rc = rc;
+ if ((rc = setSharpness(params))) final_rc = rc;
+ if ((rc = setSaturation(params))) final_rc = rc;
+ if ((rc = setContrast(params))) final_rc = rc;
+ if ((rc = setFocusMode(params))) final_rc = rc;
+ if ((rc = setISOValue(params))) final_rc = rc;
+ if ((rc = setContinuousISO(params))) final_rc = rc;
+ if ((rc = setExposureTime(params))) final_rc = rc;
+ if ((rc = setSkinToneEnhancement(params))) final_rc = rc;
+ if ((rc = setFlash(params))) final_rc = rc;
+ if ((rc = setAecLock(params))) final_rc = rc;
+ if ((rc = setAwbLock(params))) final_rc = rc;
+ if ((rc = setLensShadeValue(params))) final_rc = rc;
+ if ((rc = setMCEValue(params))) final_rc = rc;
+ if ((rc = setDISValue(params))) final_rc = rc;
+ if ((rc = setAntibanding(params))) final_rc = rc;
+ if ((rc = setExposureCompensation(params))) final_rc = rc;
+ if ((rc = setWhiteBalance(params))) final_rc = rc;
+ if ((rc = setHDRMode(params))) final_rc = rc;
+ if ((rc = setHDRNeed1x(params))) final_rc = rc;
+ if ((rc = setManualWhiteBalance(params))) final_rc = rc;
+ if ((rc = setSceneMode(params))) final_rc = rc;
+ if ((rc = setFocusAreas(params))) final_rc = rc;
+ if ((rc = setFocusPosition(params))) final_rc = rc;
+ if ((rc = setMeteringAreas(params))) final_rc = rc;
+ if ((rc = setSelectableZoneAf(params))) final_rc = rc;
+ if ((rc = setRedeyeReduction(params))) final_rc = rc;
+ if ((rc = setAEBracket(params))) final_rc = rc;
+ if ((rc = setAutoHDR(params))) final_rc = rc;
+ if ((rc = setGpsLocation(params))) final_rc = rc;
+ if ((rc = setWaveletDenoise(params))) final_rc = rc;
+ if ((rc = setFaceRecognition(params))) final_rc = rc;
+ if ((rc = setFlip(params))) final_rc = rc;
+ if ((rc = setVideoHDR(params))) final_rc = rc;
+ if ((rc = setVtEnable(params))) final_rc = rc;
+ if ((rc = setAFBracket(params))) final_rc = rc;
+ if ((rc = setReFocus(params))) final_rc = rc;
+ if ((rc = setChromaFlash(params))) final_rc = rc;
+ if ((rc = setTruePortrait(params))) final_rc = rc;
+ if ((rc = setOptiZoom(params))) final_rc = rc;
+ if ((rc = setBurstLEDOnPeriod(params))) final_rc = rc;
+ if ((rc = setRetroActiveBurstNum(params))) final_rc = rc;
+ if ((rc = setSnapshotFDReq(params))) final_rc = rc;
+ if ((rc = setTintlessValue(params))) final_rc = rc;
+ if ((rc = setCDSMode(params))) final_rc = rc;
+ if ((rc = setTemporalDenoise(params))) final_rc = rc;
+ if ((rc = setCacheVideoBuffers(params))) final_rc = rc;
+ if ((rc = setInitialExposureIndex(params))) final_rc = rc;
+ if ((rc = setInstantCapture(params))) final_rc = rc;
+ if ((rc = setInstantAEC(params))) final_rc = rc;
+
+ // update live snapshot size after all other parameters are set
+ if ((rc = setLiveSnapshotSize(params))) final_rc = rc;
+ if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+ if ((rc = setStatsDebugMask())) final_rc = rc;
+ if ((rc = setPAAF())) final_rc = rc;
+ if ((rc = setMobicat(params))) final_rc = rc;
+ if ((rc = setSeeMore(params))) final_rc = rc;
+ if ((rc = setStillMore(params))) final_rc = rc;
+ if ((rc = setCustomParams(params))) final_rc = rc;
+ if ((rc = setNoiseReductionMode(params))) final_rc = rc;
+
+ if ((rc = setLongshotParam(params))) final_rc = rc;
+ if ((rc = setDualLedCalibration(params))) final_rc = rc;
+
+ setQuadraCfa(params);
+ setVideoBatchSize();
+ setLowLightCapture();
+
+ if ((rc = updateFlash(false))) final_rc = rc;
+
+#ifdef TARGET_TS_MAKEUP
+ if (params.get(KEY_TS_MAKEUP) != NULL) {
+ set(KEY_TS_MAKEUP,params.get(KEY_TS_MAKEUP));
+ final_rc = rc;
+ }
+ if (params.get(KEY_TS_MAKEUP_WHITEN) != NULL) {
+ set(KEY_TS_MAKEUP_WHITEN,params.get(KEY_TS_MAKEUP_WHITEN));
+ final_rc = rc;
+ }
+ if (params.get(KEY_TS_MAKEUP_CLEAN) != NULL) {
+ set(KEY_TS_MAKEUP_CLEAN,params.get(KEY_TS_MAKEUP_CLEAN));
+ final_rc = rc;
+ }
+#endif
+
+ if ((rc = setAdvancedCaptureMode())) final_rc = rc;
+UPDATE_PARAM_DONE:
+ needRestart = m_bNeedRestart;
+ return final_rc;
+}
+
+/*===========================================================================
+ * FUNCTION : commitParameters
+ *
+ * DESCRIPTION: commit parameter changes to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParameters()
+{
+ return commitSetBatch();
+}
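+// Minimal usage sketch (an assumption based on the two functions above, not a
+// mandated call flow): the owner flattens the app settings, applies them, then
+// commits the batched entries:
+//     bool needRestart = false;
+//     if (param.updateParameters(flatParams, needRestart) == NO_ERROR) {
+//         param.commitParameters();
+//     }
+// "flatParams" is a hypothetical String8 holding the flattened key/value pairs.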
+
+/*===========================================================================
+ * FUNCTION : initDefaultParameters
+ *
+ * DESCRIPTION: initialize default parameters for the first time
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initDefaultParameters()
+{
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ int32_t hal_version = CAM_HAL_V1;
+ ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HAL_VERSION, hal_version);
+
+ /*************************Initialize Values******************************/
+ // Set read only parameters from camera capability
+ set(KEY_SMOOTH_ZOOM_SUPPORTED,
+ m_pCapability->smooth_zoom_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_ZOOM_SUPPORTED,
+ m_pCapability->zoom_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_VIDEO_SNAPSHOT_SUPPORTED,
+ m_pCapability->video_snapshot_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_VIDEO_STABILIZATION_SUPPORTED,
+ m_pCapability->video_stablization_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_AUTO_EXPOSURE_LOCK_SUPPORTED,
+ m_pCapability->auto_exposure_lock_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED,
+ m_pCapability->auto_wb_lock_supported? VALUE_TRUE : VALUE_FALSE);
+ set(KEY_MAX_NUM_DETECTED_FACES_HW, m_pCapability->max_num_roi);
+ set(KEY_MAX_NUM_DETECTED_FACES_SW, m_pCapability->max_num_roi);
+ set(KEY_QC_MAX_NUM_REQUESTED_FACES, m_pCapability->max_num_roi);
+ // Set focal length, horizontal view angle, and vertical view angle
+ setFloat(KEY_FOCAL_LENGTH, m_pCapability->focal_length);
+ setFloat(KEY_HORIZONTAL_VIEW_ANGLE, m_pCapability->hor_view_angle);
+ setFloat(KEY_VERTICAL_VIEW_ANGLE, m_pCapability->ver_view_angle);
+ set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+ set(KEY_QC_AUTO_HDR_SUPPORTED,
+ (m_pCapability->auto_hdr_supported)? VALUE_TRUE : VALUE_FALSE);
+ // Set supported preview sizes
+ if (m_pCapability->preview_sizes_tbl_cnt > 0 &&
+ m_pCapability->preview_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+ String8 previewSizeValues = createSizesString(
+ m_pCapability->preview_sizes_tbl, m_pCapability->preview_sizes_tbl_cnt);
+ set(KEY_SUPPORTED_PREVIEW_SIZES, previewSizeValues.string());
+ LOGH("supported preview sizes: %s", previewSizeValues.string());
+ // Set default preview size
+ CameraParameters::setPreviewSize(m_pCapability->preview_sizes_tbl[0].width,
+ m_pCapability->preview_sizes_tbl[0].height);
+ } else {
+ LOGW("supported preview sizes cnt is 0 or exceeds max!!!");
+ }
+
+ // Set supported video sizes
+ if (m_pCapability->video_sizes_tbl_cnt > 0 &&
+ m_pCapability->video_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+ String8 videoSizeValues = createSizesString(
+ m_pCapability->video_sizes_tbl, m_pCapability->video_sizes_tbl_cnt);
+ set(KEY_SUPPORTED_VIDEO_SIZES, videoSizeValues.string());
+ LOGH("supported video sizes: %s", videoSizeValues.string());
+ // Set default video size
+ CameraParameters::setVideoSize(m_pCapability->video_sizes_tbl[0].width,
+ m_pCapability->video_sizes_tbl[0].height);
+
+ //Set preferred Preview size for video
+ String8 vSize = createSizesString(&m_pCapability->preview_sizes_tbl[0], 1);
+ set(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, vSize.string());
+ } else {
+ LOGW("supported video sizes cnt is 0 or exceeds max!!!");
+ }
+
+ // Set supported picture sizes
+ if (m_pCapability->picture_sizes_tbl_cnt > 0 &&
+ m_pCapability->picture_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+ String8 pictureSizeValues = createSizesString(
+ m_pCapability->picture_sizes_tbl, m_pCapability->picture_sizes_tbl_cnt);
+ set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+ LOGH("supported pic sizes: %s", pictureSizeValues.string());
+ // Set default picture size to the smallest resolution
+ CameraParameters::setPictureSize(
+ m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].width,
+ m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].height);
+ } else {
+ LOGW("supported picture sizes cnt is 0 or exceeds max!!!");
+ }
+
+ // Check whether scaling should be enabled
+ if (m_pCapability->scale_picture_sizes_cnt > 0 &&
+ m_pCapability->scale_picture_sizes_cnt <= MAX_SCALE_SIZES_CNT){
+ //get the scale sizes, enable scaling, and re-set the picture size table with them
+ m_reprocScaleParam.setScaleEnable(true);
+ int rc_s = m_reprocScaleParam.setScaleSizeTbl(
+ m_pCapability->scale_picture_sizes_cnt, m_pCapability->scale_picture_sizes,
+ m_pCapability->picture_sizes_tbl_cnt, m_pCapability->picture_sizes_tbl);
+ if(rc_s == NO_ERROR){
+ cam_dimension_t *totalSizeTbl = m_reprocScaleParam.getTotalSizeTbl();
+ size_t totalSizeCnt = m_reprocScaleParam.getTotalSizeTblCnt();
+ String8 pictureSizeValues = createSizesString(totalSizeTbl, totalSizeCnt);
+ set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+ LOGH("scaled supported pic sizes: %s", pictureSizeValues.string());
+ }else{
+ m_reprocScaleParam.setScaleEnable(false);
+ LOGW("reset scaled picture size table failed.");
+ }
+ }else{
+ m_reprocScaleParam.setScaleEnable(false);
+ }
+
+ // Set supported thumbnail sizes
+ String8 thumbnailSizeValues = createSizesString(
+ THUMBNAIL_SIZES_MAP,
+ PARAM_MAP_SIZE(THUMBNAIL_SIZES_MAP));
+ set(KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, thumbnailSizeValues.string());
+ // Set default thumbnail size
+ set(KEY_JPEG_THUMBNAIL_WIDTH, THUMBNAIL_SIZES_MAP[0].width);
+ set(KEY_JPEG_THUMBNAIL_HEIGHT, THUMBNAIL_SIZES_MAP[0].height);
+
+ // Set supported livesnapshot sizes
+ if (m_pCapability->livesnapshot_sizes_tbl_cnt > 0 &&
+ m_pCapability->livesnapshot_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+ String8 liveSnapshotSizeValues = createSizesString(
+ m_pCapability->livesnapshot_sizes_tbl,
+ m_pCapability->livesnapshot_sizes_tbl_cnt);
+ set(KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES, liveSnapshotSizeValues.string());
+ LOGD("supported live snapshot sizes: %s", liveSnapshotSizeValues.string());
+ m_LiveSnapshotSize =
+ m_pCapability->livesnapshot_sizes_tbl[m_pCapability->livesnapshot_sizes_tbl_cnt-1];
+ }
+
+ // Set supported preview formats
+ String8 previewFormatValues = createValuesString(
+ m_pCapability->supported_preview_fmts,
+ m_pCapability->supported_preview_fmt_cnt,
+ PREVIEW_FORMATS_MAP,
+ PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP));
+ set(KEY_SUPPORTED_PREVIEW_FORMATS, previewFormatValues.string());
+ // Set default preview format
+ CameraParameters::setPreviewFormat(PIXEL_FORMAT_YUV420SP);
+
+ // Set default Video Format as OPAQUE
+ // Internally both Video and Camera subsystems use NV21_VENUS
+ set(KEY_VIDEO_FRAME_FORMAT, PIXEL_FORMAT_ANDROID_OPAQUE);
+
+ // Set supported picture formats
+ String8 pictureTypeValues(PIXEL_FORMAT_JPEG);
+ String8 str = createValuesString(
+ m_pCapability->supported_raw_fmts,
+ m_pCapability->supported_raw_fmt_cnt,
+ PICTURE_TYPES_MAP,
+ PARAM_MAP_SIZE(PICTURE_TYPES_MAP));
+ if (str.string() != NULL) {
+ pictureTypeValues.append(",");
+ pictureTypeValues.append(str);
+ }
+
+ set(KEY_SUPPORTED_PICTURE_FORMATS, pictureTypeValues.string());
+ // Set default picture Format
+ CameraParameters::setPictureFormat(PIXEL_FORMAT_JPEG);
+ // Set raw image size
+ char raw_size_str[32];
+ snprintf(raw_size_str, sizeof(raw_size_str), "%dx%d",
+ m_pCapability->raw_dim[0].width, m_pCapability->raw_dim[0].height);
+ set(KEY_QC_RAW_PICUTRE_SIZE, raw_size_str);
+ LOGD("KEY_QC_RAW_PICUTRE_SIZE: w: %d, h: %d ",
+ m_pCapability->raw_dim[0].width, m_pCapability->raw_dim[0].height);
+
+ //set default jpeg quality and thumbnail quality
+ set(KEY_JPEG_QUALITY, 85);
+ set(KEY_JPEG_THUMBNAIL_QUALITY, 85);
+
+ // Set FPS ranges
+ if (m_pCapability->fps_ranges_tbl_cnt > 0 &&
+ m_pCapability->fps_ranges_tbl_cnt <= MAX_SIZES_CNT) {
+ int default_fps_index = 0;
+ String8 fpsRangeValues = createFpsRangeString(m_pCapability->fps_ranges_tbl,
+ m_pCapability->fps_ranges_tbl_cnt,
+ default_fps_index);
+ set(KEY_SUPPORTED_PREVIEW_FPS_RANGE, fpsRangeValues.string());
+
+ int min_fps =
+ int(m_pCapability->fps_ranges_tbl[default_fps_index].min_fps * 1000);
+ int max_fps =
+ int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps * 1000);
+ m_default_fps_range = m_pCapability->fps_ranges_tbl[default_fps_index];
+ //Set video fps same as preview fps
+ setPreviewFpsRange(min_fps, max_fps, min_fps, max_fps);
+
+ // Set legacy preview fps
+ String8 fpsValues = createFpsString(m_pCapability->fps_ranges_tbl[default_fps_index]);
+ set(KEY_SUPPORTED_PREVIEW_FRAME_RATES, fpsValues.string());
+ LOGH("supported fps rates: %s", fpsValues.string());
+ CameraParameters::setPreviewFrameRate(int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps));
+ } else {
+ LOGW("supported fps ranges cnt is 0 or exceeds max!!!");
+ }
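+ // Note on the block above: the default range handed to setPreviewFpsRange()
+ // is in fps * 1000, e.g. a 15.0 - 30.0 fps capability becomes 15000 - 30000.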
+
+ // Set supported focus modes
+ if (m_pCapability->supported_focus_modes_cnt > 0) {
+ String8 focusModeValues = createValuesString(
+ m_pCapability->supported_focus_modes,
+ m_pCapability->supported_focus_modes_cnt,
+ FOCUS_MODES_MAP,
+ PARAM_MAP_SIZE(FOCUS_MODES_MAP));
+ set(KEY_SUPPORTED_FOCUS_MODES, focusModeValues);
+
+ // Set default focus mode and update corresponding parameter buf
+ const char *focusMode = lookupNameByValue(FOCUS_MODES_MAP,
+ PARAM_MAP_SIZE(FOCUS_MODES_MAP),
+ m_pCapability->supported_focus_modes[0]);
+ if (focusMode != NULL) {
+ setFocusMode(focusMode);
+ } else {
+ setFocusMode(FOCUS_MODE_FIXED);
+ }
+ } else {
+ LOGW("supported focus modes cnt is 0!!!");
+ }
+
+ // Set focus areas
+ if (m_pCapability->max_num_focus_areas > MAX_ROI) {
+ m_pCapability->max_num_focus_areas = MAX_ROI;
+ }
+ set(KEY_MAX_NUM_FOCUS_AREAS, m_pCapability->max_num_focus_areas);
+ if (m_pCapability->max_num_focus_areas > 0) {
+ setFocusAreas(DEFAULT_CAMERA_AREA);
+ }
+
+ // Set metering areas
+ if (m_pCapability->max_num_metering_areas > MAX_ROI) {
+ m_pCapability->max_num_metering_areas = MAX_ROI;
+ }
+ set(KEY_MAX_NUM_METERING_AREAS, m_pCapability->max_num_metering_areas);
+ if (m_pCapability->max_num_metering_areas > 0) {
+ setMeteringAreas(DEFAULT_CAMERA_AREA);
+ }
+
+ // set focus position ranges; ideally these should come from m_pCapability
+ m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 0;
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 1023;
+ set(KEY_QC_MIN_FOCUS_POS_INDEX,
+ (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+ set(KEY_QC_MAX_FOCUS_POS_INDEX,
+ (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+
+ m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 0;
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 1023;
+ set(KEY_QC_MIN_FOCUS_POS_DAC,
+ (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+ set(KEY_QC_MAX_FOCUS_POS_DAC,
+ (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+
+ m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO] = 0;
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO] = 100;
+ set(KEY_QC_MIN_FOCUS_POS_RATIO,
+ (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO]);
+ set(KEY_QC_MAX_FOCUS_POS_RATIO,
+ (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO]);
+
+ m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
+ if (m_pCapability->min_focus_distance > 0) {
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] =
+ m_pCapability->min_focus_distance;
+ } else {
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
+ }
+ setFloat(KEY_QC_MIN_FOCUS_POS_DIOPTER,
+ m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER]);
+ setFloat(KEY_QC_MAX_FOCUS_POS_DIOPTER,
+ m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER]);
+
+ //set supported manual focus modes
+ String8 manualFocusModes(VALUE_OFF);
+ if (m_pCapability->supported_focus_modes_cnt > 1 &&
+ m_pCapability->min_focus_distance > 0) {
+ manualFocusModes.append(",");
+ manualFocusModes.append(KEY_QC_FOCUS_SCALE_MODE);
+ manualFocusModes.append(",");
+ manualFocusModes.append(KEY_QC_FOCUS_DIOPTER_MODE);
+ }
+ set(KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES, manualFocusModes.string());
+
+ // Set Saturation
+ set(KEY_QC_MIN_SATURATION, m_pCapability->saturation_ctrl.min_value);
+ set(KEY_QC_MAX_SATURATION, m_pCapability->saturation_ctrl.max_value);
+ set(KEY_QC_SATURATION_STEP, m_pCapability->saturation_ctrl.step);
+ setSaturation(m_pCapability->saturation_ctrl.def_value);
+
+ // Set Sharpness
+ set(KEY_QC_MIN_SHARPNESS, m_pCapability->sharpness_ctrl.min_value);
+ set(KEY_QC_MAX_SHARPNESS, m_pCapability->sharpness_ctrl.max_value);
+ set(KEY_QC_SHARPNESS_STEP, m_pCapability->sharpness_ctrl.step);
+ setSharpness(m_pCapability->sharpness_ctrl.def_value);
+
+ // Set Contrast
+ set(KEY_QC_MIN_CONTRAST, m_pCapability->contrast_ctrl.min_value);
+ set(KEY_QC_MAX_CONTRAST, m_pCapability->contrast_ctrl.max_value);
+ set(KEY_QC_CONTRAST_STEP, m_pCapability->contrast_ctrl.step);
+ setContrast(m_pCapability->contrast_ctrl.def_value);
+
+ // Set SCE factor
+ set(KEY_QC_MIN_SCE_FACTOR, m_pCapability->sce_ctrl.min_value); // -100
+ set(KEY_QC_MAX_SCE_FACTOR, m_pCapability->sce_ctrl.max_value); // 100
+ set(KEY_QC_SCE_FACTOR_STEP, m_pCapability->sce_ctrl.step); // 10
+ setSkinToneEnhancement(m_pCapability->sce_ctrl.def_value); // 0
+
+ // Set Brightness
+ set(KEY_QC_MIN_BRIGHTNESS, m_pCapability->brightness_ctrl.min_value); // 0
+ set(KEY_QC_MAX_BRIGHTNESS, m_pCapability->brightness_ctrl.max_value); // 6
+ set(KEY_QC_BRIGHTNESS_STEP, m_pCapability->brightness_ctrl.step); // 1
+ setBrightness(m_pCapability->brightness_ctrl.def_value);
+
+ // Set Auto exposure
+ String8 autoExposureValues = createValuesString(
+ m_pCapability->supported_aec_modes,
+ m_pCapability->supported_aec_modes_cnt,
+ AUTO_EXPOSURE_MAP,
+ PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP));
+ set(KEY_QC_SUPPORTED_AUTO_EXPOSURE, autoExposureValues.string());
+ setAutoExposure(AUTO_EXPOSURE_FRAME_AVG);
+
+ // Set Exposure Compensation
+ set(KEY_MAX_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_max); // 12
+ set(KEY_MIN_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_min); // -12
+ setFloat(KEY_EXPOSURE_COMPENSATION_STEP, m_pCapability->exposure_compensation_step); // 1/6
+ setExposureCompensation(m_pCapability->exposure_compensation_default); // 0
+
+ // Set Instant AEC modes
+ String8 instantAECModes = createValuesString(
+ m_pCapability->supported_instant_aec_modes,
+ m_pCapability->supported_instant_aec_modes_cnt,
+ INSTANT_AEC_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP));
+ set(KEY_QC_INSTANT_AEC_SUPPORTED_MODES, instantAECModes.string());
+
+ // Set Instant Capture modes
+ String8 instantCaptureModes = createValuesString(
+ m_pCapability->supported_instant_aec_modes,
+ m_pCapability->supported_instant_aec_modes_cnt,
+ INSTANT_CAPTURE_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP));
+ set(KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES, instantCaptureModes.string());
+
+
+ // Set Antibanding
+ String8 antibandingValues = createValuesString(
+ m_pCapability->supported_antibandings,
+ m_pCapability->supported_antibandings_cnt,
+ ANTIBANDING_MODES_MAP,
+ PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP));
+ set(KEY_SUPPORTED_ANTIBANDING, antibandingValues);
+ setAntibanding(ANTIBANDING_OFF);
+
+ // Set Effect
+ String8 effectValues = createValuesString(
+ m_pCapability->supported_effects,
+ m_pCapability->supported_effects_cnt,
+ EFFECT_MODES_MAP,
+ PARAM_MAP_SIZE(EFFECT_MODES_MAP));
+
+ if (m_pCapability->supported_effects_cnt > 0) {
+ set(KEY_SUPPORTED_EFFECTS, effectValues);
+ } else {
+ LOGW("Color effects are not available");
+ set(KEY_SUPPORTED_EFFECTS, EFFECT_NONE);
+ }
+ setEffect(EFFECT_NONE);
+
+ // Set WhiteBalance
+ String8 whitebalanceValues = createValuesString(
+ m_pCapability->supported_white_balances,
+ m_pCapability->supported_white_balances_cnt,
+ WHITE_BALANCE_MODES_MAP,
+ PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP));
+ set(KEY_SUPPORTED_WHITE_BALANCE, whitebalanceValues);
+ setWhiteBalance(WHITE_BALANCE_AUTO);
+
+ // set supported wb cct range; ideally these should come from m_pCapability
+ m_pCapability->min_wb_cct = 2000;
+ m_pCapability->max_wb_cct = 8000;
+ set(KEY_QC_MIN_WB_CCT, m_pCapability->min_wb_cct);
+ set(KEY_QC_MAX_WB_CCT, m_pCapability->max_wb_cct);
+
+ // set supported wb rgb gains; ideally these should come from m_pCapability,
+ // but hard-code them for now.
+ m_pCapability->min_wb_gain = 1.0;
+ m_pCapability->max_wb_gain = 4.0;
+ setFloat(KEY_QC_MIN_WB_GAIN, m_pCapability->min_wb_gain);
+ setFloat(KEY_QC_MAX_WB_GAIN, m_pCapability->max_wb_gain);
+
+ //set supported manual wb modes
+ String8 manualWBModes(VALUE_OFF);
+ if(m_pCapability->sensor_type.sens_type != CAM_SENSOR_YUV) {
+ manualWBModes.append(",");
+ manualWBModes.append(KEY_QC_WB_CCT_MODE);
+ manualWBModes.append(",");
+ manualWBModes.append(KEY_QC_WB_GAIN_MODE);
+ }
+ set(KEY_QC_SUPPORTED_MANUAL_WB_MODES, manualWBModes.string());
+
+ // Set Flash mode
+ if(m_pCapability->supported_flash_modes_cnt > 0) {
+ String8 flashValues = createValuesString(
+ m_pCapability->supported_flash_modes,
+ m_pCapability->supported_flash_modes_cnt,
+ FLASH_MODES_MAP,
+ PARAM_MAP_SIZE(FLASH_MODES_MAP));
+ set(KEY_SUPPORTED_FLASH_MODES, flashValues);
+ setFlash(FLASH_MODE_OFF);
+ } else {
+ LOGW("supported flash modes cnt is 0!!!");
+ }
+
+ // Set Scene Mode
+ String8 sceneModeValues = createValuesString(
+ m_pCapability->supported_scene_modes,
+ m_pCapability->supported_scene_modes_cnt,
+ SCENE_MODES_MAP,
+ PARAM_MAP_SIZE(SCENE_MODES_MAP));
+ set(KEY_SUPPORTED_SCENE_MODES, sceneModeValues);
+ setSceneMode(SCENE_MODE_AUTO);
+
+ // Set CDS Mode
+ String8 cdsModeValues = createValuesStringFromMap(
+ CDS_MODES_MAP,
+ PARAM_MAP_SIZE(CDS_MODES_MAP));
+ set(KEY_QC_SUPPORTED_CDS_MODES, cdsModeValues);
+
+ // Set video CDS Mode
+ String8 videoCdsModeValues = createValuesStringFromMap(
+ CDS_MODES_MAP,
+ PARAM_MAP_SIZE(CDS_MODES_MAP));
+ set(KEY_QC_SUPPORTED_VIDEO_CDS_MODES, videoCdsModeValues);
+
+ // Set TNR Mode
+ String8 tnrModeValues = createValuesStringFromMap(
+ ON_OFF_MODES_MAP,
+ PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+ set(KEY_QC_SUPPORTED_TNR_MODES, tnrModeValues);
+
+ // Set video TNR Mode
+ String8 videoTnrModeValues = createValuesStringFromMap(
+ ON_OFF_MODES_MAP,
+ PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+ set(KEY_QC_SUPPORTED_VIDEO_TNR_MODES, videoTnrModeValues);
+
+ // Set ISO Mode
+ String8 isoValues = createValuesString(
+ m_pCapability->supported_iso_modes,
+ m_pCapability->supported_iso_modes_cnt,
+ ISO_MODES_MAP,
+ PARAM_MAP_SIZE(ISO_MODES_MAP));
+ set(KEY_QC_SUPPORTED_ISO_MODES, isoValues);
+ setISOValue(ISO_AUTO);
+
+ // Set exposure time
+ String8 manualExpModes(VALUE_OFF);
+ bool expTimeSupported = false;
+ bool manualISOSupported = false;
+ //capability values are in nano sec, convert to milli sec for upper layers
+ char expTimeStr[20];
+ double min_exp_time = (double) m_pCapability->exposure_time_range[0] / 1000000.0;
+ double max_exp_time = (double) m_pCapability->exposure_time_range[1] / 1000000.0;
+ snprintf(expTimeStr, sizeof(expTimeStr), "%f", min_exp_time);
+ set(KEY_QC_MIN_EXPOSURE_TIME, expTimeStr);
+ snprintf(expTimeStr, sizeof(expTimeStr), "%f", max_exp_time);
+ set(KEY_QC_MAX_EXPOSURE_TIME, expTimeStr);
+ if ((min_exp_time > 0) && (max_exp_time > min_exp_time)) {
+ manualExpModes.append(",");
+ manualExpModes.append(KEY_QC_EXP_TIME_PRIORITY);
+ expTimeSupported = true;
+ }
+ LOGH(", Exposure time min %f ms, max %f ms",
+ min_exp_time, max_exp_time);
+
+ // Set iso
+ set(KEY_QC_MIN_ISO, m_pCapability->sensitivity_range.min_sensitivity);
+ set(KEY_QC_MAX_ISO, m_pCapability->sensitivity_range.max_sensitivity);
+ LOGH(", ISO min %d, max %d",
+ m_pCapability->sensitivity_range.min_sensitivity,
+ m_pCapability->sensitivity_range.max_sensitivity);
+ if ((m_pCapability->sensitivity_range.min_sensitivity > 0) &&
+ (m_pCapability->sensitivity_range.max_sensitivity >
+ m_pCapability->sensitivity_range.min_sensitivity)) {
+ manualExpModes.append(",");
+ manualExpModes.append(KEY_QC_ISO_PRIORITY);
+ manualISOSupported = true;
+ }
+ if (expTimeSupported && manualISOSupported) {
+ manualExpModes.append(",");
+ manualExpModes.append(KEY_QC_USER_SETTING);
+ }
+ //finally set supported manual exposure modes
+ set(KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES, manualExpModes.string());
+
+ // Set HFR
+ String8 hfrValues = createHfrValuesString(
+ m_pCapability->hfr_tbl,
+ m_pCapability->hfr_tbl_cnt,
+ HFR_MODES_MAP,
+ PARAM_MAP_SIZE(HFR_MODES_MAP));
+ set(KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES, hfrValues.string());
+ set(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, "off");
+ set(KEY_QC_VIDEO_HIGH_FRAME_RATE, "off");
+ String8 hfrSizeValues = createHfrSizesString(
+ m_pCapability->hfr_tbl,
+ m_pCapability->hfr_tbl_cnt);
+ set(KEY_QC_SUPPORTED_HFR_SIZES, hfrSizeValues.string());
+ LOGD("HFR values = %s HFR Sizes = %s", hfrValues.string(), hfrSizeValues.string());
+ setHighFrameRate(CAM_HFR_MODE_OFF);
+
+ // Set Focus algorithms
+ String8 focusAlgoValues = createValuesString(
+ m_pCapability->supported_focus_algos,
+ m_pCapability->supported_focus_algos_cnt,
+ FOCUS_ALGO_MAP,
+ PARAM_MAP_SIZE(FOCUS_ALGO_MAP));
+ set(KEY_QC_SUPPORTED_FOCUS_ALGOS, focusAlgoValues);
+ setSelectableZoneAf(FOCUS_ALGO_AUTO);
+
+ // Set Zoom Ratios
+ if (m_pCapability->zoom_supported > 0) {
+ String8 zoomRatioValues = createZoomRatioValuesString(
+ m_pCapability->zoom_ratio_tbl,
+ m_pCapability->zoom_ratio_tbl_cnt);
+ set(KEY_ZOOM_RATIOS, zoomRatioValues);
+ set(KEY_MAX_ZOOM, (int)(m_pCapability->zoom_ratio_tbl_cnt - 1));
+ setZoom(0);
+ }
+
+ // Set Bracketing/HDR
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.capture.burst.exposures", prop, "");
+ if (strlen(prop) > 0) {
+ set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+ }
+ String8 bracketingValues = createValuesStringFromMap(
+ BRACKETING_MODES_MAP,
+ PARAM_MAP_SIZE(BRACKETING_MODES_MAP));
+ set(KEY_QC_SUPPORTED_AE_BRACKET_MODES, bracketingValues);
+ setAEBracket(AE_BRACKET_OFF);
+
+ //Set AF Bracketing.
+ for (size_t i = 0; i < m_pCapability->supported_focus_modes_cnt; i++) {
+ if ((CAM_FOCUS_MODE_AUTO == m_pCapability->supported_focus_modes[i]) &&
+ ((m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_UBIFOCUS) > 0)) {
+ String8 afBracketingValues = createValuesStringFromMap(
+ AF_BRACKETING_MODES_MAP,
+ PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP));
+ set(KEY_QC_SUPPORTED_AF_BRACKET_MODES, afBracketingValues);
+ setAFBracket(AF_BRACKET_OFF);
+ break;
+ }
+ }
+
+ //Set Refocus.
+ //Re-use ubifocus flag for now.
+ for (size_t i = 0; i < m_pCapability->supported_focus_modes_cnt; i++) {
+ if ((CAM_FOCUS_MODE_AUTO == m_pCapability->supported_focus_modes[i]) &&
+ (m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_REFOCUS) > 0) {
+ String8 reFocusValues = createValuesStringFromMap(
+ RE_FOCUS_MODES_MAP,
+ PARAM_MAP_SIZE(RE_FOCUS_MODES_MAP));
+ set(KEY_QC_SUPPORTED_RE_FOCUS_MODES, reFocusValues);
+ setReFocus(RE_FOCUS_OFF);
+ }
+ }
+
+ //Set Chroma Flash.
+ if ((m_pCapability->supported_flash_modes_cnt > 0) &&
+ (m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_CHROMA_FLASH) > 0) {
+ String8 chromaFlashValues = createValuesStringFromMap(
+ CHROMA_FLASH_MODES_MAP,
+ PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP));
+ set(KEY_QC_SUPPORTED_CHROMA_FLASH_MODES, chromaFlashValues);
+ setChromaFlash(CHROMA_FLASH_OFF);
+ }
+
+ //Set Opti Zoom.
+ if (m_pCapability->zoom_supported &&
+ (m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_OPTIZOOM) > 0){
+ String8 optiZoomValues = createValuesStringFromMap(
+ OPTI_ZOOM_MODES_MAP,
+ PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP));
+ set(KEY_QC_SUPPORTED_OPTI_ZOOM_MODES, optiZoomValues);
+ setOptiZoom(OPTI_ZOOM_OFF);
+ }
+
+ //Set HDR Type
+ uint32_t supported_hdr_modes = m_pCapability->qcom_supported_feature_mask &
+ (CAM_QCOM_FEATURE_SENSOR_HDR | CAM_QCOM_FEATURE_HDR);
+ if (supported_hdr_modes) {
+ if (CAM_QCOM_FEATURE_SENSOR_HDR == supported_hdr_modes) {
+ String8 hdrModeValues;
+ hdrModeValues.append(HDR_MODE_SENSOR);
+ set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+ setHDRMode(HDR_MODE_SENSOR);
+ } else if (CAM_QCOM_FEATURE_HDR == supported_hdr_modes) {
+ String8 hdrModeValues;
+ hdrModeValues.append(HDR_MODE_MULTI_FRAME);
+ set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+ setHDRMode(HDR_MODE_MULTI_FRAME);
+ } else {
+ String8 hdrModeValues = createValuesStringFromMap(
+ HDR_MODES_MAP,
+ PARAM_MAP_SIZE(HDR_MODES_MAP));
+ set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+ setHDRMode(HDR_MODE_MULTI_FRAME);
+ }
+ }
+
+ //Set HDR need 1x
+ String8 hdrNeed1xValues;
+ if (!m_bHDRModeSensor) {
+ hdrNeed1xValues = createValuesStringFromMap(TRUE_FALSE_MODES_MAP,
+ PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP));
+ } else {
+ hdrNeed1xValues.append(VALUE_FALSE);
+ }
+ setHDRNeed1x(VALUE_FALSE);
+ set(KEY_QC_SUPPORTED_HDR_NEED_1X, hdrNeed1xValues);
+
+ //Set True Portrait
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_TRUEPORTRAIT) > 0) {
+ String8 truePortraitValues = createValuesStringFromMap(
+ TRUE_PORTRAIT_MODES_MAP,
+ PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP));
+ set(KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES, truePortraitValues);
+ }
+
+ // Set Denoise
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) > 0){
+ String8 denoiseValues = createValuesStringFromMap(
+ DENOISE_ON_OFF_MODES_MAP, PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP));
+ set(KEY_QC_SUPPORTED_DENOISE, denoiseValues.string());
+#ifdef DEFAULT_DENOISE_MODE_ON
+ setWaveletDenoise(DENOISE_ON);
+#else
+ setWaveletDenoise(DENOISE_OFF);
+#endif
+ }
+
+ // Set feature enable/disable
+ String8 enableDisableValues = createValuesStringFromMap(
+ ENABLE_DISABLE_MODES_MAP, PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP));
+
+ // Set Lens Shading
+ set(KEY_QC_SUPPORTED_LENSSHADE_MODES, enableDisableValues);
+ setLensShadeValue(VALUE_ENABLE);
+ // Set MCE
+ set(KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES, enableDisableValues);
+ setMCEValue(VALUE_ENABLE);
+
+ // Set DIS
+ set(KEY_QC_SUPPORTED_DIS_MODES, enableDisableValues);
+ setDISValue(VALUE_DISABLE);
+
+ // Set Histogram
+ set(KEY_QC_SUPPORTED_HISTOGRAM_MODES,
+ m_pCapability->histogram_supported ? enableDisableValues : "");
+ set(KEY_QC_HISTOGRAM, VALUE_DISABLE);
+
+ //Set Red Eye Reduction
+ set(KEY_QC_SUPPORTED_REDEYE_REDUCTION, enableDisableValues);
+ setRedeyeReduction(VALUE_DISABLE);
+
+ //Set SkinTone Enhancement
+ set(KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES, enableDisableValues);
+
+ // Enable LTM by default and disable it in HDR & SeeMore usecases
+ setToneMapMode(true, false);
+
+ // Set feature on/off
+ String8 onOffValues = createValuesStringFromMap(
+ ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+
+ //Set See more (LLVD)
+ if (m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_LLVD) {
+ set(KEY_QC_SUPPORTED_SEE_MORE_MODES, onOffValues);
+ setSeeMore(VALUE_OFF);
+ }
+
+ //Set Still more
+ if (m_pCapability->qcom_supported_feature_mask &
+ CAM_QCOM_FEATURE_STILLMORE) {
+ String8 stillMoreValues = createValuesStringFromMap(
+ STILL_MORE_MODES_MAP,
+ PARAM_MAP_SIZE(STILL_MORE_MODES_MAP));
+ set(KEY_QC_SUPPORTED_STILL_MORE_MODES, stillMoreValues);
+ setStillMore(STILL_MORE_OFF);
+ }
+
+ //Set Noise Reduction mode
+ if (m_pCapability->qcom_supported_feature_mask &
+ CAM_QTI_FEATURE_SW_TNR) {
+ String8 noiseReductionModesValues = createValuesStringFromMap(
+ NOISE_REDUCTION_MODES_MAP, PARAM_MAP_SIZE(NOISE_REDUCTION_MODES_MAP));
+ set(KEY_QC_NOISE_REDUCTION_MODE_VALUES, noiseReductionModesValues);
+ setNoiseReductionMode(VALUE_OFF);
+ }
+
+ //Set Scene Detection
+ set(KEY_QC_SUPPORTED_SCENE_DETECT, onOffValues);
+ setSceneDetect(VALUE_OFF);
+ m_bHDREnabled = false;
+ m_bHDR1xFrameEnabled = false;
+
+ m_bHDRThumbnailProcessNeeded = false;
+ m_bHDR1xExtraBufferNeeded = true;
+ for (uint32_t i=0; i<m_pCapability->hdr_bracketing_setting.num_frames; i++) {
+ if (0 == m_pCapability->hdr_bracketing_setting.exp_val.values[i]) {
+ m_bHDR1xExtraBufferNeeded = false;
+ break;
+ }
+ }
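+ // In other words, an extra 1x buffer is only needed when the HDR bracketing
+ // table has no 0 EV (1x exposure) entry.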
+
+ // Set HDR output scaling
+ char value[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.hdr.outcrop", value, VALUE_DISABLE);
+ if (strncmp(VALUE_ENABLE, value, sizeof(VALUE_ENABLE))) {
+ m_bHDROutputCropEnabled = false;
+ } else {
+ m_bHDROutputCropEnabled = true;
+ }
+
+ //Set Face Detection
+ set(KEY_QC_SUPPORTED_FACE_DETECTION, onOffValues);
+ set(KEY_QC_FACE_DETECTION, VALUE_OFF);
+
+ //Set Face Recognition
+ //set(KEY_QC_SUPPORTED_FACE_RECOGNITION, onOffValues);
+ //set(KEY_QC_FACE_RECOGNITION, VALUE_OFF);
+
+ //Set ZSL
+ set(KEY_QC_SUPPORTED_ZSL_MODES, onOffValues);
+#ifdef DEFAULT_ZSL_MODE_ON
+ set(KEY_QC_ZSL, VALUE_ON);
+ m_bZslMode = true;
+#else
+ set(KEY_QC_ZSL, VALUE_OFF);
+ m_bZslMode = false;
+#endif
+
+ // Check if zsl mode property is enabled.
+ // If yes, force the camera to be in zsl mode
+ // and force zsl mode to be enabled in dual camera mode.
+ memset(value, 0x0, PROPERTY_VALUE_MAX);
+ property_get("persist.camera.zsl.mode", value, "0");
+ int32_t zsl_mode = atoi(value);
+ if((zsl_mode == 1) ||
+ (m_bZslMode == true) ||
+ (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON)) {
+ LOGH("%d: Forcing Camera to ZSL mode enabled");
+ set(KEY_QC_ZSL, VALUE_ON);
+ m_bForceZslMode = true;
+ m_bZslMode = true;
+ int32_t value = m_bForceZslMode;
+ ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, value);
+ }
+ m_bZslMode_new = m_bZslMode;
+
+ set(KEY_QC_SCENE_SELECTION, VALUE_DISABLE);
+
+ // Rdi mode
+ set(KEY_QC_SUPPORTED_RDI_MODES, enableDisableValues);
+ setRdiMode(VALUE_DISABLE);
+
+ // Secure mode
+ set(KEY_QC_SUPPORTED_SECURE_MODES, enableDisableValues);
+ setSecureMode(VALUE_DISABLE);
+
+ //Set video HDR
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_VIDEO_HDR) > 0) {
+ set(KEY_QC_SUPPORTED_VIDEO_HDR_MODES, onOffValues);
+ set(KEY_QC_VIDEO_HDR, VALUE_OFF);
+ }
+
+ //Set HW Sensor Snapshot HDR
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SENSOR_HDR)> 0) {
+ set(KEY_QC_SUPPORTED_SENSOR_HDR_MODES, onOffValues);
+ set(KEY_QC_SENSOR_HDR, VALUE_OFF);
+ m_bSensorHDREnabled = false;
+ }
+
+ // Set VT TimeStamp
+ set(KEY_QC_VT_ENABLE, VALUE_DISABLE);
+ //Set Touch AF/AEC
+ String8 touchValues = createValuesStringFromMap(
+ TOUCH_AF_AEC_MODES_MAP, PARAM_MAP_SIZE(TOUCH_AF_AEC_MODES_MAP));
+
+ set(KEY_QC_SUPPORTED_TOUCH_AF_AEC, touchValues);
+ set(KEY_QC_TOUCH_AF_AEC, TOUCH_AF_AEC_OFF);
+
+ //set flip mode
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) > 0) {
+ String8 flipModes = createValuesStringFromMap(
+ FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP));
+ set(KEY_QC_SUPPORTED_FLIP_MODES, flipModes);
+ set(KEY_QC_PREVIEW_FLIP, FLIP_MODE_OFF);
+ set(KEY_QC_VIDEO_FLIP, FLIP_MODE_OFF);
+ set(KEY_QC_SNAPSHOT_PICTURE_FLIP, FLIP_MODE_OFF);
+ }
+
+ // Set default Auto Exposure lock value
+ setAecLock(VALUE_FALSE);
+
+ // Set default AWB_LOCK lock value
+ setAwbLock(VALUE_FALSE);
+
+ // Set default Camera mode
+ set(KEY_QC_CAMERA_MODE, 0);
+
+ // Add support for internal preview restart
+ set(KEY_INTERNAL_PERVIEW_RESTART, VALUE_TRUE);
+ // Set default burst number
+ set(KEY_QC_SNAPSHOT_BURST_NUM, 0);
+ set(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER, 0);
+
+ //Get RAM size and disable memory-hungry features on low-RAM devices
+ struct sysinfo info;
+ sysinfo(&info);
+
+ LOGH("totalram = %ld, freeram = %ld ", info.totalram,
+ info.freeram);
+ if (info.totalram > TOTAL_RAM_SIZE_512MB) {
+ set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_TRUE);
+ } else {
+ m_bIsLowMemoryDevice = true;
+ set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_FALSE);
+ }
+
+ setOfflineRAW();
+ memset(mStreamPpMask, 0, sizeof(cam_feature_mask_t)*CAM_STREAM_TYPE_MAX);
+ //Set video buffers as uncached by default
+ set(KEY_QC_CACHE_VIDEO_BUFFERS, VALUE_DISABLE);
+
+ // Set default longshot mode
+ set(KEY_QC_LONG_SHOT, "off");
+ //Enable longshot by default
+ set(KEY_QC_LONGSHOT_SUPPORTED, VALUE_TRUE);
+
+ int32_t rc = commitParameters();
+ if (rc == NO_ERROR) {
+ rc = setNumOfSnapshot();
+ }
+
+ //Set Video Rotation
+ String8 videoRotationValues = createValuesStringFromMap(VIDEO_ROTATION_MODES_MAP,
+ PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP));
+
+ set(KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES, videoRotationValues.string());
+ set(KEY_QC_VIDEO_ROTATION, VIDEO_ROTATION_0);
+
+ //Check for EZTune
+ setEztune();
+ //Default set for video batch size
+ set(KEY_QC_VIDEO_BATCH_SIZE, 0);
+
+ //Setup dual-camera
+ setDcrf();
+
+ // For Aux Camera of dual camera Mode,
+ // by default set no display mode
+ if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+ set(KEY_QC_NO_DISPLAY_MODE, 1);
+ m_bNoDisplayMode = true;
+ }
+
+ cam_dimension_t pic_dim;
+ pic_dim.width = 0;
+ pic_dim.height = 0;
+
+ for(uint32_t i = 0;
+ i < (m_pCapability->picture_sizes_tbl_cnt - 1);
+ i++) {
+ if ((pic_dim.width * pic_dim.height) <
+ (int32_t)(m_pCapability->picture_sizes_tbl[i].width *
+ m_pCapability->picture_sizes_tbl[i].height)) {
+ pic_dim.width =
+ m_pCapability->picture_sizes_tbl[i].width;
+ pic_dim.height =
+ m_pCapability->picture_sizes_tbl[i].height;
+ }
+ }
+ LOGD("max pic size = %d %d", pic_dim.width,
+ pic_dim.height);
+ setMaxPicSize(pic_dim);
+
+ setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : allocate
+ *
+ * DESCRIPTION: Allocate buffer memory for parameter obj (if necessary)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::allocate()
+{
+ int32_t rc = NO_ERROR;
+
+ if (m_pParamHeap != NULL) {
+ return rc;
+ }
+
+ //Allocate Set Param Buffer
+ m_pParamHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+ if (m_pParamHeap == NULL) {
+ return NO_MEMORY;
+ }
+
+ rc = m_pParamHeap->allocate(1, sizeof(parm_buffer_t), NON_SECURE);
+ if(rc != OK) {
+ rc = NO_MEMORY;
+ LOGE("Error!! Param buffers have not been allocated");
+ delete m_pParamHeap;
+ m_pParamHeap = NULL;
+ }
+
+ return rc;
+}
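+// Note: the heap above holds a single parm_buffer_t; init() maps it to the
+// backend as CAM_MAPPING_BUF_TYPE_PARM_BUF so batched parameter updates can be
+// committed against it.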
+
+/*===========================================================================
+ * FUNCTION : init
+ *
+ * DESCRIPTION: initialize parameter obj
+ *
+ * PARAMETERS :
+ * @capabilities : ptr to camera capabilities
+ * @mmops : ptr to memory ops table for mapping/unmapping
+ * @adjustFPS : object reference for additional (possibly thermal)
+ * framerate adjustment
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::init(cam_capability_t *capabilities,
+ mm_camera_vtbl_t *mmOps, QCameraAdjustFPS *adjustFPS)
+{
+ int32_t rc = NO_ERROR;
+
+ m_pCapability = capabilities;
+ m_pCamOpsTbl = mmOps;
+ m_AdjustFPS = adjustFPS;
+
+ if (m_pParamHeap == NULL) {
+ LOGE("Parameter buffers have not been allocated");
+ rc = UNKNOWN_ERROR;
+ goto TRANS_INIT_ERROR1;
+ }
+
+ //Map memory for parameters buffer
+ cam_buf_map_type_list bufMapList;
+ rc = QCameraBufferMaps::makeSingletonBufMapList(
+ CAM_MAPPING_BUF_TYPE_PARM_BUF, 0 /*stream id*/,
+ 0 /*buffer index*/, -1 /*plane index*/, 0 /*cookie*/,
+ m_pParamHeap->getFd(0), sizeof(parm_buffer_t), bufMapList);
+
+ if (rc == NO_ERROR) {
+ rc = m_pCamOpsTbl->ops->map_bufs(m_pCamOpsTbl->camera_handle,
+ &bufMapList);
+ }
+
+ if(rc < 0) {
+ LOGE("failed to map SETPARM buffer");
+ rc = FAILED_TRANSACTION;
+ goto TRANS_INIT_ERROR2;
+ }
+ m_pParamBuf = (parm_buffer_t*) DATA_PTR(m_pParamHeap,0);
+
+ // Check if it is dual camera mode
+ if(m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ //Allocate related cam sync buffer
+ //this is needed for the payload that goes along with bundling cmd for related
+ //camera use cases
+ m_pRelCamSyncHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+ rc = m_pRelCamSyncHeap->allocate(1,
+ sizeof(cam_sync_related_sensors_event_info_t), NON_SECURE);
+ if(rc != OK) {
+ rc = NO_MEMORY;
+ LOGE("Failed to allocate Related cam sync Heap memory");
+ goto TRANS_INIT_ERROR3;
+ }
+
+ //Map memory for related cam sync buffer
+ rc = m_pCamOpsTbl->ops->map_buf(m_pCamOpsTbl->camera_handle,
+ CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
+ m_pRelCamSyncHeap->getFd(0),
+ sizeof(cam_sync_related_sensors_event_info_t));
+ if(rc < 0) {
+ LOGE("failed to map Related cam sync buffer");
+ rc = FAILED_TRANSACTION;
+ goto TRANS_INIT_ERROR4;
+ }
+ m_pRelCamSyncBuf =
+ (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
+ }
+
+ initDefaultParameters();
+
+ mCommon.init(capabilities);
+
+ m_bInited = true;
+
+ goto TRANS_INIT_DONE;
+
+TRANS_INIT_ERROR4:
+ m_pRelCamSyncHeap->deallocate();
+
+TRANS_INIT_ERROR3:
+ delete m_pRelCamSyncHeap;
+ m_pRelCamSyncHeap = NULL;
+
+TRANS_INIT_ERROR2:
+ m_pParamHeap->deallocate();
+ delete m_pParamHeap;
+ m_pParamHeap = NULL;
+
+TRANS_INIT_ERROR1:
+ m_pCapability = NULL;
+ m_pCamOpsTbl = NULL;
+ m_AdjustFPS = NULL;
+
+TRANS_INIT_DONE:
+ return rc;
+}
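+// Note: the TRANS_INIT_ERROR* labels unwind in reverse allocation order, so a
+// failure at any step releases only the resources set up before it.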
+
+/*===========================================================================
+ * FUNCTION : deinit
+ *
+ * DESCRIPTION: deinitialize
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::deinit()
+{
+ if (!m_bInited) {
+ return;
+ }
+
+ //clear all entries in the map
+ String8 emptyStr;
+ QCameraParameters::unflatten(emptyStr);
+
+ if ((NULL != m_pCamOpsTbl) && (m_pCamOpsTbl->ops != NULL)) {
+ m_pCamOpsTbl->ops->unmap_buf(
+ m_pCamOpsTbl->camera_handle,
+ CAM_MAPPING_BUF_TYPE_PARM_BUF);
+
+ if (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ m_pCamOpsTbl->ops->unmap_buf(
+ m_pCamOpsTbl->camera_handle,
+ CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
+ }
+ }
+
+ m_pCapability = NULL;
+ if (NULL != m_pParamHeap) {
+ m_pParamHeap->deallocate();
+ delete m_pParamHeap;
+ m_pParamHeap = NULL;
+ m_pParamBuf = NULL;
+ }
+ if (NULL != m_pRelCamSyncHeap) {
+ m_pRelCamSyncHeap->deallocate();
+ delete m_pRelCamSyncHeap;
+ m_pRelCamSyncHeap = NULL;
+ m_pRelCamSyncBuf = NULL;
+ }
+
+ m_AdjustFPS = NULL;
+ m_tempMap.clear();
+ m_pCamOpsTbl = NULL;
+
+ m_bInited = false;
+}
+
+/*===========================================================================
+ * FUNCTION : parse_pair
+ *
+ * DESCRIPTION: helper function to parse string like "640x480" or "10000,20000"
+ *
+ * PARAMETERS :
+ * @str : input string to be parsed
+ * @first : [output] first value of the pair
+ * @second : [output] second value of the pair
+ * @delim : [input] delimiter separating the pair
+ * @endptr : [output] ptr to the end of the pair string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
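+// Example (illustrative): parse_pair("640x480", &w, &h, 'x', &end) sets w=640,
+// h=480 and leaves end pointing at the terminating '\0'.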
+int32_t QCameraParameters::parse_pair(const char *str,
+ int *first,
+ int *second,
+ char delim,
+ char **endptr = NULL)
+{
+ // Find the first integer.
+ char *end;
+ int w = (int)strtol(str, &end, 10);
+ // If the delimiter does not immediately follow, give up.
+ if (*end != delim) {
+ LOGE("Cannot find delimiter (%c) in str=%s", delim, str);
+ return BAD_VALUE;
+ }
+
+ // Find the second integer, immediately after the delimiter.
+ int h = (int)strtol(end+1, &end, 10);
+
+ *first = w;
+ *second = h;
+
+ if (endptr) {
+ *endptr = end;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : parseSizesList
+ *
+ * DESCRIPTION: helper function to parse string containing sizes
+ *
+ * PARAMETERS :
+ * @sizesStr: [input] input string to be parsed
+ * @sizes : [output] reference to store parsed sizes
+ *
+ * RETURN : none
+ *==========================================================================*/
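+// Example (illustrative): "640x480,1280x720" yields two Size entries,
+// (640, 480) and (1280, 720); parsing stops at the first malformed token.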
+void QCameraParameters::parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+ if (sizesStr == 0) {
+ return;
+ }
+
+ char *sizeStartPtr = (char *)sizesStr;
+
+ while (true) {
+ int width, height;
+ int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+ &sizeStartPtr);
+ if (success != NO_ERROR || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+ LOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+ return;
+ }
+ sizes.push(Size(width, height));
+
+ if (*sizeStartPtr == '\0') {
+ return;
+ }
+ sizeStartPtr++;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : adjustPreviewFpsRange
+ *
+ * DESCRIPTION: adjust preview FPS ranges
+ * according to external events
+ *
+ * PARAMETERS :
+ * @fpsRange : ptr to the FPS range to be applied
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+ if ( fpsRange == NULL ) {
+ return BAD_VALUE;
+ }
+
+ if ( m_pParamBuf == NULL ) {
+ return NO_INIT;
+ }
+
+ int32_t rc = initBatchUpdate(m_pParamBuf);
+ if ( rc != NO_ERROR ) {
+ LOGE("Failed to initialize group update table");
+ return rc;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FPS_RANGE, *fpsRange)) {
+ LOGE("Parameters batch failed");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if ( rc != NO_ERROR ) {
+ LOGE("Failed to commit batch parameters");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview and video FPS ranges
+ *
+ * PARAMETERS :
+ * @min_fps : min preview FPS value (fps * 1000)
+ * @max_fps : max preview FPS value (fps * 1000)
+ * @vid_min_fps : min video FPS value (fps * 1000)
+ * @vid_max_fps : max video FPS value (fps * 1000)
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(int min_fps,
+ int max_fps, int vid_min_fps,int vid_max_fps)
+{
+ char str[32];
+ char value[PROPERTY_VALUE_MAX];
+ int fixedFpsValue;
+ /* This property, when non-zero, fixes the fps to the value the user wants */
+ property_get("persist.debug.set.fixedfps", value, "0");
+ fixedFpsValue = atoi(value);
+
+ LOGD("E minFps = %d, maxFps = %d , vid minFps = %d, vid maxFps = %d",
+ min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+ if(fixedFpsValue != 0) {
+ min_fps = max_fps = fixedFpsValue*1000;
+ if (!isHfrMode()) {
+ vid_min_fps = vid_max_fps = fixedFpsValue*1000;
+ }
+ }
+ snprintf(str, sizeof(str), "%d,%d", min_fps, max_fps);
+ LOGH("Setting preview fps range %s", str);
+ updateParamEntry(KEY_PREVIEW_FPS_RANGE, str);
+ cam_fps_range_t fps_range;
+ memset(&fps_range, 0x00, sizeof(cam_fps_range_t));
+ fps_range.min_fps = (float)min_fps / 1000.0f;
+ fps_range.max_fps = (float)max_fps / 1000.0f;
+ fps_range.video_min_fps = (float)vid_min_fps / 1000.0f;
+ fps_range.video_max_fps = (float)vid_max_fps / 1000.0f;
+
+ LOGH("Updated: minFps = %d, maxFps = %d ,"
+ " vid minFps = %d, vid maxFps = %d",
+ min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+ if ( NULL != m_AdjustFPS ) {
+ if (m_ThermalMode == QCAMERA_THERMAL_ADJUST_FPS &&
+ !m_bRecordingHint) {
+ float minVideoFps = min_fps, maxVideoFps = max_fps;
+ if (isHfrMode()) {
+ minVideoFps = m_hfrFpsRange.video_min_fps;
+ maxVideoFps = m_hfrFpsRange.video_max_fps;
+ }
+ m_AdjustFPS->recalcFPSRange(min_fps, max_fps, minVideoFps, maxVideoFps, fps_range);
+ LOGH("Thermal adjusted Preview fps range %3.2f,%3.2f, %3.2f, %3.2f",
+ fps_range.min_fps, fps_range.max_fps,
+ fps_range.video_min_fps, fps_range.video_max_fps);
+ }
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
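+
+/* Example (illustrative only): as in the Android KEY_PREVIEW_FPS_RANGE
+ * convention, the arguments are in units of 1/1000 fps, so
+ *   setPreviewFpsRange(15000, 30000, 15000, 30000);
+ * programs a 15.0 - 30.0 fps range into the cam_fps_range_t sent to the backend.
+ */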
+
+
+
+/*===========================================================================
+ * FUNCTION : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure
+ *
+ * PARAMETERS :
+ * @autoExp : auto exposure value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const char *autoExp)
+{
+ if (autoExp != NULL) {
+ int32_t value = lookupAttr(AUTO_EXPOSURE_MAP, PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP), autoExp);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting auto exposure %s", autoExp);
+ updateParamEntry(KEY_QC_AUTO_EXPOSURE, autoExp);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_ALGO_TYPE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid auto exposure value: %s", (autoExp == NULL) ? "NULL" : autoExp);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setEffect
+ *
+ * DESCRIPTION: set effect
+ *
+ * PARAMETERS :
+ * @effect : effect value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const char *effect)
+{
+ if (effect != NULL) {
+ int32_t value = lookupAttr(EFFECT_MODES_MAP, PARAM_MAP_SIZE(EFFECT_MODES_MAP), effect);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting effect %s", effect);
+ updateParamEntry(KEY_EFFECT, effect);
+ uint8_t prmEffect = static_cast<uint8_t>(value);
+ mParmEffect = prmEffect;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EFFECT, prmEffect)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid effect value: %s", (effect == NULL) ? "NULL" : effect);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setBrightness
+ *
+ * DESCRIPTION: set brightness control value
+ *
+ * PARAMETERS :
+ * @brightness : brightness control value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(int brightness)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", brightness);
+ updateParamEntry(KEY_QC_BRIGHTNESS, val);
+
+ LOGH("Setting brightness %s", val);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BRIGHTNESS, brightness)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusMode
+ *
+ * DESCRIPTION: set focus mode
+ *
+ * PARAMETERS :
+ * @focusMode : focus mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const char *focusMode)
+{
+ if (focusMode != NULL) {
+ int32_t value = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focusMode);
+ if (value != NAME_NOT_FOUND) {
+ int32_t rc = NO_ERROR;
+ LOGH("Setting focus mode %s", focusMode);
+ mFocusMode = (cam_focus_mode_type)value;
+
+ updateParamEntry(KEY_FOCUS_MODE, focusMode);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_FOCUS_MODE, (uint8_t)value)) {
+ rc = BAD_VALUE;
+ }
+ if (strcmp(focusMode,"infinity")==0){
+ set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+ }
+ return rc;
+ }
+ }
+ LOGE("Invalid focus mode value: %s", (focusMode == NULL) ? "NULL" : focusMode);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusPosition
+ *
+ * DESCRIPTION: set focus position
+ *
+ * PARAMETERS :
+ * @typeStr : focus position type, index or dac_code
+ * @posStr : focus position.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusPosition(const char *typeStr, const char *posStr)
+{
+ LOGH(", type:%s, pos: %s", typeStr, posStr);
+ int32_t type = atoi(typeStr);
+ float pos = (float) atof(posStr);
+
+ if ((type >= CAM_MANUAL_FOCUS_MODE_INDEX) &&
+ (type < CAM_MANUAL_FOCUS_MODE_MAX)) {
+ // get max and min focus position from m_pCapability
+ float minFocusPos = m_pCapability->min_focus_pos[type];
+ float maxFocusPos = m_pCapability->max_focus_pos[type];
+ LOGH(", focusPos min: %f, max: %f", minFocusPos, maxFocusPos);
+
+ if (pos >= minFocusPos && pos <= maxFocusPos) {
+ updateParamEntry(KEY_QC_MANUAL_FOCUS_POS_TYPE, typeStr);
+ updateParamEntry(KEY_QC_MANUAL_FOCUS_POSITION, posStr);
+
+ cam_manual_focus_parm_t manual_focus;
+ manual_focus.flag = (cam_manual_focus_mode_type)type;
+ if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_DIOPTER) {
+ manual_focus.af_manual_diopter = pos;
+ } else if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_RATIO) {
+ manual_focus.af_manual_lens_position_ratio = (int32_t) pos;
+ } else if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_INDEX) {
+ manual_focus.af_manual_lens_position_index = (int32_t) pos;
+ } else {
+ manual_focus.af_manual_lens_position_dac = (int32_t) pos;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MANUAL_FOCUS_POS,
+ manual_focus)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+
+ LOGE("invalid params, type:%d, pos: %f", type, pos);
+ return BAD_VALUE;
+}
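+
+/* Usage sketch (illustrative only; the numeric values are hypothetical):
+ *   setFocusPosition("0", "42");
+ * where "0" is assumed to select CAM_MANUAL_FOCUS_MODE_INDEX and "42" is a lens
+ * position index inside the m_pCapability min/max range checked above.
+ */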
+
+/*===========================================================================
+ * FUNCTION : updateAEInfo
+ *
+ * DESCRIPTION: update exposure information from metadata callback
+ *
+ * PARAMETERS :
+ * @ae_params : auto exposure params
+ *
+ * RETURN : void
+ *==========================================================================*/
+void QCameraParameters::updateAEInfo(cam_3a_params_t &ae_params)
+{
+ const char *prevExpTime = get(KEY_QC_CURRENT_EXPOSURE_TIME);
+ char newExpTime[15];
+ snprintf(newExpTime, sizeof(newExpTime), "%f", ae_params.exp_time*1000.0);
+
+ if (prevExpTime == NULL || strcmp(prevExpTime, newExpTime)) {
+ LOGD("update exposure time: old: %s, new: %s", prevExpTime, newExpTime);
+ set(KEY_QC_CURRENT_EXPOSURE_TIME, newExpTime);
+ }
+
+ int32_t prevISO = getInt(KEY_QC_CURRENT_ISO);
+ int32_t newISO = ae_params.iso_value;
+ if (prevISO != newISO) {
+ LOGD("update iso: old:%d, new:%d", prevISO, newISO);
+ set(KEY_QC_CURRENT_ISO, newISO);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : updateCurrentFocusPosition
+ *
+ * DESCRIPTION: update current focus position from metadata callback
+ *
+ * PARAMETERS :
+ * @cur_pos_info : current focus position info
+ *
+ * RETURN : void
+ *==========================================================================*/
+void QCameraParameters::updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info)
+{
+ int prevScalePos = getInt(KEY_QC_FOCUS_POSITION_SCALE);
+ int newScalePos = (int) cur_pos_info.scale;
+ if (prevScalePos != newScalePos) {
+ LOGD("update focus scale: old:%d, new:%d", prevScalePos, newScalePos);
+ set(KEY_QC_FOCUS_POSITION_SCALE, newScalePos);
+ }
+
+ float prevDiopterPos = getFloat(KEY_QC_FOCUS_POSITION_DIOPTER);
+ float newDiopterPos = cur_pos_info.diopter;
+ if (prevDiopterPos != newDiopterPos) {
+ LOGD("update focus diopter: old:%f, new:%f", prevDiopterPos, newDiopterPos);
+ setFloat(KEY_QC_FOCUS_POSITION_DIOPTER, newDiopterPos);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value
+ *
+ * PARAMETERS :
+ * @sharpness : sharpness control value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(int sharpness)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", sharpness);
+ updateParamEntry(KEY_QC_SHARPNESS, val);
+ LOGH("Setting sharpness %s", val);
+ m_nSharpness = sharpness;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SHARPNESS, m_nSharpness)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement value
+ *
+ * PARAMETERS :
+ * @sceFactor : skin tone enhancement factor value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(int sceFactor)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", sceFactor);
+ updateParamEntry(KEY_QC_SCE_FACTOR, val);
+ LOGH("Setting skintone enhancement %s", val);
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SCE_FACTOR, sceFactor)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSaturation
+ *
+ * DESCRIPTION: set saturation control value
+ *
+ * PARAMETERS :
+ * @saturation : saturation control value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(int saturation)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", saturation);
+ updateParamEntry(KEY_QC_SATURATION, val);
+ LOGH("Setting saturation %s", val);
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SATURATION, saturation)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setContrast
+ *
+ * DESCRIPTION: set contrast control value
+ *
+ * PARAMETERS :
+ * @contrast : contrast control value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(int contrast)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", contrast);
+ updateParamEntry(KEY_QC_CONTRAST, val);
+ LOGH("Setting contrast %s", val);
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CONTRAST, contrast)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value
+ *
+ * PARAMETERS :
+ * @sceneDetect : scene detect value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const char *sceneDetect)
+{
+ if (sceneDetect != NULL) {
+ int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+ sceneDetect);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting Scene Detect %s", sceneDetect);
+ updateParamEntry(KEY_QC_SCENE_DETECT, sceneDetect);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ASD_ENABLE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Scene Detect value: %s",
+ (sceneDetect == NULL) ? "NULL" : sceneDetect);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setSensorSnapshotHDR
+ *
+ * DESCRIPTION: set snapshot HDR value
+ *
+ * PARAMETERS :
+ * @snapshotHDR : snapshot HDR value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSensorSnapshotHDR(const char *snapshotHDR)
+{
+ if (snapshotHDR != NULL) {
+ int32_t value = (cam_sensor_hdr_type_t) lookupAttr(ON_OFF_MODES_MAP,
+ PARAM_MAP_SIZE(ON_OFF_MODES_MAP), snapshotHDR);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting Sensor Snapshot HDR %s", snapshotHDR);
+ updateParamEntry(KEY_QC_SENSOR_HDR, snapshotHDR);
+
+ char zz_prop[PROPERTY_VALUE_MAX];
+ memset(zz_prop, 0, sizeof(zz_prop));
+ property_get("persist.camera.zzhdr.enable", zz_prop, "0");
+ uint8_t zzhdr_enable = (uint8_t)atoi(zz_prop);
+
+ if (zzhdr_enable && (value != CAM_SENSOR_HDR_OFF)) {
+ value = CAM_SENSOR_HDR_ZIGZAG;
+ LOGH("%s: Overriding to ZZ HDR Mode", __func__);
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SENSOR_HDR, (cam_sensor_hdr_type_t)value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Snapshot HDR value: %s",
+ (snapshotHDR == NULL) ? "NULL" : snapshotHDR);
+ return BAD_VALUE;
+
+}
+
+
+/*===========================================================================
+ * FUNCTION : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value
+ *
+ * PARAMETERS :
+ * @videoHDR : video HDR value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const char *videoHDR)
+{
+ if (videoHDR != NULL) {
+ int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), videoHDR);
+ if (value != NAME_NOT_FOUND) {
+
+ char zz_prop[PROPERTY_VALUE_MAX];
+ memset(zz_prop, 0, sizeof(zz_prop));
+ property_get("persist.camera.zzhdr.video", zz_prop, "0");
+ uint8_t use_zzhdr_video = (uint8_t)atoi(zz_prop);
+
+ if (use_zzhdr_video) {
+ LOGH("%s: Using ZZ HDR for video mode", __func__);
+ if (value)
+ value = CAM_SENSOR_HDR_ZIGZAG;
+ else
+ value = CAM_SENSOR_HDR_OFF;
+ LOGH("%s: Overriding sensor HDR mode to: %d", __func__, value);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SENSOR_HDR, (cam_sensor_hdr_type_t) value)) {
+ LOGE("%s: Override to sensor HDR mode for video HDR failed", __func__);
+ return BAD_VALUE;
+ }
+ updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+ } else {
+ LOGH("%s: Setting Video HDR %s", __func__, videoHDR);
+ updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_VIDEO_HDR, value)) {
+ return BAD_VALUE;
+ }
+ }
+
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Video HDR value: %s",
+ (videoHDR == NULL) ? "NULL" : videoHDR);
+ return BAD_VALUE;
+}
+
+
+
+/*===========================================================================
+ * FUNCTION : setVtEnable
+ *
+ * DESCRIPTION: set vt Enable value
+ *
+ * PARAMETERS :
+ * @vtEnable : VT enable value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const char *vtEnable)
+{
+ if (vtEnable != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), vtEnable);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting Vt Enable %s", vtEnable);
+ m_bAVTimerEnabled = true;
+ updateParamEntry(KEY_QC_VT_ENABLE, vtEnable);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_VT, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Vt Enable value: %s",
+ (vtEnable == NULL) ? "NULL" : vtEnable);
+ m_bAVTimerEnabled = false;
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition value
+ *
+ * PARAMETERS :
+ * @faceRecog : face recognition value string
+ * @maxFaces : number of max faces to be detected/recognized
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const char *faceRecog,
+ uint32_t maxFaces)
+{
+ if (faceRecog != NULL) {
+ int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), faceRecog);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting face recognition %s", faceRecog);
+ updateParamEntry(KEY_QC_FACE_RECOGNITION, faceRecog);
+
+ uint32_t faceProcMask = m_nFaceProcMask;
+ if (value > 0) {
+ faceProcMask |= CAM_FACE_PROCESS_MASK_RECOGNITION;
+ } else {
+ faceProcMask &= (uint32_t)(~CAM_FACE_PROCESS_MASK_RECOGNITION);
+ }
+
+ if(m_nFaceProcMask == faceProcMask) {
+ LOGH("face process mask not changed, no ops here");
+ return NO_ERROR;
+ }
+ m_nFaceProcMask = faceProcMask;
+ LOGH("FaceProcMask -> %d", m_nFaceProcMask);
+
+ // set parm for face process
+ cam_fd_set_parm_t fd_set_parm;
+ memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+ fd_set_parm.fd_mode = m_nFaceProcMask;
+ fd_set_parm.num_fd = maxFaces;
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FD, fd_set_parm)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid face recognition value: %s", (faceRecog == NULL) ? "NULL" : faceRecog);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setZoom
+ *
+ * DESCRIPTION: set zoom level
+ *
+ * PARAMETERS :
+ * @zoom_level : zoom level
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(int zoom_level)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", zoom_level);
+ updateParamEntry(KEY_ZOOM, val);
+ LOGH("zoom level: %d", zoom_level);
+ mZoomLevel = zoom_level;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZOOM, zoom_level)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setISOValue
+ *
+ * DESCRIPTION: set ISO value
+ *
+ * PARAMETERS :
+ * @isoValue : ISO value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setISOValue(const char *isoValue)
+{
+ if (isoValue != NULL) {
+ if (!strcmp(isoValue, ISO_MANUAL)) {
+ LOGD("iso manual mode - use continuous iso");
+ updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+ return NO_ERROR;
+ }
+ int32_t value = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), isoValue);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting ISO value %s", isoValue);
+ updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+
+ cam_intf_parm_manual_3a_t iso_settings;
+ memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+ iso_settings.previewOnly = FALSE;
+ iso_settings.value = value;
+ if (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) {
+ iso_settings.previewOnly = TRUE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ISO, iso_settings)) {
+ return BAD_VALUE;
+ }
+ m_isoValue = value;
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid ISO value: %s",
+ (isoValue == NULL) ? "NULL" : isoValue);
+ return BAD_VALUE;
+}
+
+
+/*===========================================================================
+ * FUNCTION : setContinuousISO
+ *
+ * DESCRIPTION: set continuous ISO value
+ *
+ * PARAMETERS :
+ * @params : ISO value parameter
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContinuousISO(const QCameraParameters& params)
+{
+ const char *iso = params.get(KEY_QC_ISO_MODE);
+ LOGD("current iso mode: %s", iso);
+
+ if (iso != NULL) {
+ if (strcmp(iso, ISO_MANUAL)) {
+ LOGD("don't set iso to back-end.");
+ return NO_ERROR;
+ }
+ }
+
+ const char *str = params.get(KEY_QC_CONTINUOUS_ISO);
+ const char *prev_str = get(KEY_QC_CONTINUOUS_ISO);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setContinuousISO(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setExposureTime
+ *
+ * DESCRIPTION: set exposure time
+ *
+ * PARAMETERS :
+ * @expTimeStr : string of exposure time in ms
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureTime(const char *expTimeStr)
+{
+ if (expTimeStr != NULL) {
+ double expTimeMs = atof(expTimeStr);
+ // Input is in milliseconds. Convert to nanoseconds for the backend
+ int64_t expTimeNs = (int64_t)(expTimeMs*1000000L);
+
+ // expTime == 0 means not to use manual exposure time.
+ if ((0 <= expTimeNs) &&
+ ((expTimeNs == 0) ||
+ ((expTimeNs >= m_pCapability->exposure_time_range[0]) &&
+ (expTimeNs <= m_pCapability->exposure_time_range[1])))) {
+ LOGH(", exposure time: %f ms", expTimeMs);
+ updateParamEntry(KEY_QC_EXPOSURE_TIME, expTimeStr);
+
+ cam_intf_parm_manual_3a_t exp_settings;
+ memset(&exp_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+ if (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) {
+ exp_settings.previewOnly = TRUE;
+ if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL1) {
+ exp_settings.value = expTimeNs;
+ } else {
+ exp_settings.value =
+ (int64_t)(QCAMERA_MAX_EXP_TIME_LEVEL1*1000000L);
+ }
+ } else {
+ exp_settings.previewOnly = FALSE;
+ exp_settings.value = expTimeNs;
+ }
+
+ //Based on exposure values we can decide the capture type here
+ if (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) {
+ if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL1) {
+ setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_1);
+ } else if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL2) {
+ setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_2);
+ } else if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL4) {
+ setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_3);
+ } else {
+ setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF);
+ }
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EXPOSURE_TIME,
+ exp_settings)) {
+ return BAD_VALUE;
+ }
+ m_expTime = expTimeNs;
+
+ return NO_ERROR;
+ }
+ }
+
+ LOGE("Invalid exposure time, value: %s",
+ (expTimeStr == NULL) ? "NULL" : expTimeStr);
+ return BAD_VALUE;
+}
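+
+/* Example (illustrative only): the string is interpreted as milliseconds and
+ * converted to nanoseconds for the backend, e.g. setExposureTime("33.33")
+ * requests roughly a 33330000 ns exposure, provided the value lies inside
+ * m_pCapability->exposure_time_range; "0" disables manual exposure time.
+ */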
+
+/*===========================================================================
+ * FUNCTION : setLongshotEnable
+ *
+ * DESCRIPTION: set a flag indicating longshot mode
+ *
+ * PARAMETERS :
+ * @enable : true - Longshot enabled
+ * false - Longshot disabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotEnable(bool enable)
+{
+ int32_t rc = NO_ERROR;
+ int8_t value = enable ? 1 : 0;
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LONGSHOT_ENABLE, value)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit parameter changes");
+ return rc;
+ }
+
+ m_bLongshotEnabled = enable;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setFlash
+ *
+ * DESCRIPTION: set flash mode
+ *
+ * PARAMETERS :
+ * @flashStr : LED flash mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const char *flashStr)
+{
+ if (flashStr != NULL) {
+ int32_t value = lookupAttr(FLASH_MODES_MAP, PARAM_MAP_SIZE(FLASH_MODES_MAP), flashStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting Flash value %s", flashStr);
+ updateParamEntry(KEY_FLASH_MODE, flashStr);
+ mFlashValue = value;
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid flash value: %s", (flashStr == NULL) ? "NULL" : flashStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : updateFlashMode
+ *
+ * DESCRIPTION: update flash mode
+ *
+ * PARAMETERS :
+ * @flash_mode : LED flash mode value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFlashMode(cam_flash_mode_t flash_mode)
+{
+ int32_t rc = NO_ERROR;
+ if (flash_mode >= CAM_FLASH_MODE_MAX) {
+ LOGH("Error!! Invalid flash mode (%d)", flash_mode);
+ return BAD_VALUE;
+ }
+ LOGH("Setting Flash mode from EZTune %d", flash_mode);
+
+ const char *flash_mode_str = lookupNameByValue(FLASH_MODES_MAP,
+ PARAM_MAP_SIZE(FLASH_MODES_MAP), flash_mode);
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ rc = setFlash(flash_mode_str);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to update Flash mode");
+ return rc;
+ }
+
+ LOGH("Setting Flash mode %d", mFlashValue);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LED_MODE, mFlashValue)) {
+ LOGE("Failed to set led mode");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit parameters");
+ return rc;
+ }
+
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : configureFlash
+ *
+ * DESCRIPTION: configure Flash Bracketing.
+ *
+ * PARAMETERS :
+ * @frame_config : output configuration structure to fill in.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureFlash(cam_capture_frame_config_t &frame_config)
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+ uint32_t i = 0;
+
+ if (isChromaFlashEnabled()) {
+
+ rc = setToneMapMode(false, false);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure tone map");
+ return rc;
+ }
+
+ rc = setCDSMode(CAM_CDS_MODE_OFF, false);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure CDS mode");
+ return rc;
+ }
+
+ LOGH("Enable Chroma Flash capture");
+ cam_flash_mode_t flash_mode = CAM_FLASH_MODE_OFF;
+ frame_config.num_batch =
+ m_pCapability->chroma_flash_settings_need.burst_count;
+ if (frame_config.num_batch > CAM_MAX_FLASH_BRACKETING) {
+ frame_config.num_batch = CAM_MAX_FLASH_BRACKETING;
+ }
+ for (i = 0; i < frame_config.num_batch; i++) {
+ flash_mode = (m_pCapability->chroma_flash_settings_need.flash_bracketing[i]) ?
+ CAM_FLASH_MODE_ON:CAM_FLASH_MODE_OFF;
+ frame_config.configs[i].num_frames = 1;
+ frame_config.configs[i].type = CAM_CAPTURE_FLASH;
+ frame_config.configs[i].flash_mode = flash_mode;
+ }
+ } else if (mFlashValue != CAM_FLASH_MODE_OFF) {
+ frame_config.num_batch = 1;
+ for (i = 0; i < frame_config.num_batch; i++) {
+ frame_config.configs[i].num_frames = getNumOfSnapshots();
+ frame_config.configs[i].type = CAM_CAPTURE_FLASH;
+ frame_config.configs[i].flash_mode =(cam_flash_mode_t)mFlashValue;
+ }
+ }
+
+ LOGD("Flash frame batch cnt = %d",frame_config.num_batch);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureHDRBracketing
+ *
+ * DESCRIPTION: configure HDR Bracketing.
+ *
+ * PARAMETERS :
+ * @frame_config : output configuration structure to fill in.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureHDRBracketing(cam_capture_frame_config_t &frame_config)
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+ uint32_t i = 0;
+
+ uint32_t hdrFrameCount = m_pCapability->hdr_bracketing_setting.num_frames;
+ LOGH("HDR values %d, %d frame count: %u",
+ (int8_t) m_pCapability->hdr_bracketing_setting.exp_val.values[0],
+ (int8_t) m_pCapability->hdr_bracketing_setting.exp_val.values[1],
+ hdrFrameCount);
+
+ frame_config.num_batch = hdrFrameCount;
+
+ cam_bracket_mode mode =
+ m_pCapability->hdr_bracketing_setting.exp_val.mode;
+ if (mode == CAM_EXP_BRACKETING_ON) {
+ rc = setToneMapMode(false, true);
+ if (rc != NO_ERROR) {
+ LOGW("Failed to disable tone map during HDR");
+ }
+ }
+ for (i = 0; i < frame_config.num_batch; i++) {
+ frame_config.configs[i].num_frames = 1;
+ frame_config.configs[i].type = CAM_CAPTURE_BRACKETING;
+ frame_config.configs[i].hdr_mode.mode = mode;
+ frame_config.configs[i].hdr_mode.values =
+ m_pCapability->hdr_bracketing_setting.exp_val.values[i];
+ LOGD("exp values %d",
+ (int)frame_config.configs[i].hdr_mode.values);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureAEBracketing
+ *
+ * DESCRIPTION: configure AE Bracketing.
+ *
+ * PARAMETERS :
+ * @frame_config : output configuration structure to fill in.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureAEBracketing(cam_capture_frame_config_t &frame_config)
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+ uint32_t i = 0;
+ char exp_value[MAX_EXP_BRACKETING_LENGTH];
+
+ rc = setToneMapMode(false, true);
+ if (rc != NO_ERROR) {
+ LOGH("Failed to disable tone map during AEBracketing");
+ }
+
+ uint32_t burstCount = 0;
+ const char *str_val = m_AEBracketingClient.values;
+ if ((str_val != NULL) && (strlen(str_val) > 0)) {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+ char *saveptr = NULL;
+ char *token = strtok_r(prop, ",", &saveptr);
+ if (token != NULL) {
+ exp_value[burstCount++] = (char)atoi(token);
+ while (token != NULL) {
+ token = strtok_r(NULL, ",", &saveptr);
+ if (token != NULL) {
+ exp_value[burstCount++] = (char)atoi(token);
+ }
+ }
+ }
+ }
+
+ frame_config.num_batch = burstCount;
+ cam_bracket_mode mode = m_AEBracketingClient.mode;
+
+ for (i = 0; i < frame_config.num_batch; i++) {
+ frame_config.configs[i].num_frames = 1;
+ frame_config.configs[i].type = CAM_CAPTURE_BRACKETING;
+ frame_config.configs[i].hdr_mode.mode = mode;
+ frame_config.configs[i].hdr_mode.values =
+ m_AEBracketingClient.values[i];
+ LOGD("exp values %d", (int)m_AEBracketingClient.values[i]);
+ }
+
+ LOGH("num_frame = %d X", burstCount);
+ return rc;
+}
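+
+/* Example (illustrative only): m_AEBracketingClient.values carries a comma
+ * separated list of bracketing steps, so a client string such as "-6,0,6"
+ * yields frame_config.num_batch == 3, one bracketed capture per entry.
+ */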
+
+/*===========================================================================
+ * FUNCTION : configureLowLight
+ *
+ * DESCRIPTION: configure low light frame capture use case.
+ *
+ * PARAMETERS :
+ * @frame_config : output configuration structure to fill in.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureLowLight(cam_capture_frame_config_t &frame_config)
+{
+ int32_t rc = NO_ERROR;
+
+ frame_config.num_batch = 1;
+ frame_config.configs[0].num_frames = getNumOfSnapshots();
+ frame_config.configs[0].type = CAM_CAPTURE_LOW_LIGHT;
+ frame_config.configs[0].low_light_mode = CAM_LOW_LIGHT_ON;
+ LOGH("Snapshot Count: %d", frame_config.configs[0].num_frames);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configureManualCapture
+ *
+ * DESCRIPTION: configure manual capture.
+ *
+ * PARAMETERS :
+ * @frame_config : output configuration structure to fill in.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureManualCapture(cam_capture_frame_config_t &frame_config)
+{
+ int32_t rc = NO_ERROR;
+ uint32_t i = 0;
+
+ LOGD("E");
+ if (getManualCaptureMode()) {
+ frame_config.num_batch = 1;
+ for (i = 0; i < frame_config.num_batch; i++) {
+ frame_config.configs[i].num_frames = getNumOfSnapshots();
+ frame_config.configs[i].type = CAM_CAPTURE_MANUAL_3A;
+ if (m_expTime != 0) {
+ frame_config.configs[i].manual_3A_mode.exp_mode = CAM_SETTINGS_TYPE_ON;
+ frame_config.configs[i].manual_3A_mode.exp_time = m_expTime;
+ } else {
+ frame_config.configs[i].manual_3A_mode.exp_mode = CAM_SETTINGS_TYPE_AUTO;
+ frame_config.configs[i].manual_3A_mode.exp_time = 0;
+ }
+
+ if (m_isoValue != 0) {
+ frame_config.configs[i].manual_3A_mode.iso_mode = CAM_SETTINGS_TYPE_ON;
+ frame_config.configs[i].manual_3A_mode.iso_value = m_isoValue;
+ } else {
+ frame_config.configs[i].manual_3A_mode.iso_mode = CAM_SETTINGS_TYPE_AUTO;
+ frame_config.configs[i].manual_3A_mode.iso_value = 0;
+ }
+ }
+ }
+ LOGD("X: batch cnt = %d", frame_config.num_batch);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : configFrameCapture
+ *
+ * DESCRIPTION: configuration for ZSL special captures (FLASH/HDR etc)
+ *
+ * PARAMETERS :
+ * @commitSettings : flag to enable or disable committing these settings
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configFrameCapture(bool commitSettings)
+{
+ int32_t rc = NO_ERROR;
+ int32_t value;
+
+ memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+
+ if (commitSettings) {
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ }
+
+ if (isHDREnabled() || m_bAeBracketingEnabled || m_bAFBracketingOn ||
+ m_bOptiZoomOn || m_bReFocusOn || m_LowLightLevel
+ || getManualCaptureMode()) {
+ value = CAM_FLASH_MODE_OFF;
+ } else if (isChromaFlashEnabled()) {
+ value = CAM_FLASH_MODE_ON;
+ } else {
+ value = mFlashValue;
+ }
+
+ if (value != CAM_FLASH_MODE_OFF) {
+ configureFlash(m_captureFrameConfig);
+ } else if(isHDREnabled()) {
+ configureHDRBracketing (m_captureFrameConfig);
+ } else if(isAEBracketEnabled()) {
+ configureAEBracketing (m_captureFrameConfig);
+ } else if (m_LowLightLevel) {
+ configureLowLight (m_captureFrameConfig);
+
+ //Add a reset capture type as the last batch for the back-end to restore settings.
+ int32_t batch_count = m_captureFrameConfig.num_batch;
+ m_captureFrameConfig.configs[batch_count].type = CAM_CAPTURE_RESET;
+ m_captureFrameConfig.configs[batch_count].num_frames = 0;
+ m_captureFrameConfig.num_batch++;
+ } else if (getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2){
+ rc = configureManualCapture (m_captureFrameConfig);
+ //Add a reset capture type as the last batch for the back-end to restore settings.
+ int32_t batch_count = m_captureFrameConfig.num_batch;
+ m_captureFrameConfig.configs[batch_count].type = CAM_CAPTURE_RESET;
+ m_captureFrameConfig.configs[batch_count].num_frames = 0;
+ m_captureFrameConfig.num_batch++;
+ }
+
+ rc = ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,
+ (cam_capture_frame_config_t)m_captureFrameConfig);
+ if (rc != NO_ERROR) {
+ rc = BAD_VALUE;
+ LOGE("Failed to set capture settings");
+ return rc;
+ }
+
+ if (commitSettings) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit parameters");
+ return rc;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : resetFrameCapture
+ *
+ * DESCRIPTION: reset special captures settings(FLASH/HDR etc)
+ *
+ * PARAMETERS :
+ * @commitSettings : flag to enable or disable committing these settings
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::resetFrameCapture(bool commitSettings)
+{
+ int32_t rc = NO_ERROR;
+ memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+
+ if (commitSettings) {
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ }
+
+ if (isHDREnabled() || isAEBracketEnabled()) {
+ rc = setToneMapMode(true, true);
+ if (rc != NO_ERROR) {
+ LOGH("Failed to enable tone map during HDR/AEBracketing");
+ }
+ rc = stopAEBracket();
+ } else if ((isChromaFlashEnabled()) || (mFlashValue != CAM_FLASH_MODE_OFF)
+ || (getLowLightLevel() != CAM_LOW_LIGHT_OFF)) {
+ rc = setToneMapMode(true, false);
+ if (rc != NO_ERROR) {
+ LOGH("Failed to enable tone map during chroma flash");
+ }
+
+ rc = setCDSMode(mCds_mode, false);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure CDS mode");
+ return rc;
+ }
+ }
+
+ rc = ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,
+ (cam_capture_frame_config_t)m_captureFrameConfig);
+ if (rc != NO_ERROR) {
+ rc = BAD_VALUE;
+ LOGE("Failed to set capture settings");
+ return rc;
+ }
+
+ if (commitSettings) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit parameters");
+ return rc;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value
+ *
+ * PARAMETERS :
+ * @aecLockStr : AEC lock value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const char *aecLockStr)
+{
+ if (aecLockStr != NULL) {
+ int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+ aecLockStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting AECLock value %s", aecLockStr);
+ updateParamEntry(KEY_AUTO_EXPOSURE_LOCK, aecLockStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_AEC_LOCK, (uint32_t)value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid AECLock value: %s",
+ (aecLockStr == NULL) ? "NULL" : aecLockStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock value
+ *
+ * PARAMETERS :
+ * @awbLockStr : AWB lock value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const char *awbLockStr)
+{
+ if (awbLockStr != NULL) {
+ int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+ awbLockStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting AWBLock value %s", awbLockStr);
+ updateParamEntry(KEY_AUTO_WHITEBALANCE_LOCK, awbLockStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_AWB_LOCK, (uint32_t)value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid AWBLock value: %s", (awbLockStr == NULL) ? "NULL" : awbLockStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value
+ *
+ * PARAMETERS :
+ * @mceStr : MCE value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const char *mceStr)
+{
+ if (mceStr != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), mceStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting MCE value %s", mceStr);
+ updateParamEntry(KEY_QC_MEMORY_COLOR_ENHANCEMENT, mceStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MCE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid MCE value: %s", (mceStr == NULL) ? "NULL" : mceStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setTintlessValue
+ *
+ * DESCRIPTION: enable/disable tintless from user setting
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_TINTLESS_ENABLE);
+ const char *prev_str = get(KEY_QC_TINTLESS_ENABLE);
+ char prop[PROPERTY_VALUE_MAX];
+
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.tintless", prop, VALUE_ENABLE);
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setTintlessValue(str);
+ }
+ } else {
+ if (prev_str == NULL ||
+ strcmp(prev_str, prop) != 0 ) {
+ setTintlessValue(prop);
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setTintless
+ *
+ * DESCRIPTION: set tintless mode
+ *
+ * PARAMETERS :
+ * @enable : 1 = enable, 0 = disable
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::setTintless(bool enable)
+{
+ if (enable) {
+ setTintlessValue(VALUE_ENABLE);
+ } else {
+ setTintlessValue(VALUE_DISABLE);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setTintlessValue
+ *
+ * DESCRIPTION: set tintless value
+ *
+ * PARAMETERS :
+ * @tintStr : Tintless value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const char *tintStr)
+{
+ if (tintStr != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), tintStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting Tintless value %s", tintStr);
+ updateParamEntry(KEY_QC_TINTLESS_ENABLE, tintStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_TINTLESS, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Tintless value: %s", (tintStr == NULL) ? "NULL" : tintStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setCDSMode
+ *
+ * DESCRIPTION: Set CDS mode
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCDSMode(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_CDS_MODE);
+ const char *prev_str = get(KEY_QC_CDS_MODE);
+ const char *video_str = params.get(KEY_QC_VIDEO_CDS_MODE);
+ const char *video_prev_str = get(KEY_QC_VIDEO_CDS_MODE);
+ int32_t rc = NO_ERROR;
+
+ if (m_bRecordingHint_new == true) {
+ if (video_str) {
+ if ((video_prev_str == NULL) || (strcmp(video_str, video_prev_str) != 0)) {
+ int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+ video_str);
+ if (cds_mode != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_VIDEO_CDS_MODE, video_str);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed to update table with CDS mode");
+ rc = BAD_VALUE;
+ } else {
+ LOGD("Set CDS in video mode = %d", cds_mode);
+ mCds_mode = cds_mode;
+ m_bNeedRestart = true;
+ }
+ } else {
+ LOGE("Invalid argument for video CDS MODE %d", cds_mode);
+ rc = BAD_VALUE;
+ }
+ }
+ } else {
+ char video_prop[PROPERTY_VALUE_MAX];
+ memset(video_prop, 0, sizeof(video_prop));
+ property_get("persist.camera.video.CDS", video_prop, CDS_MODE_ON);
+ int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+ video_prop);
+ if (cds_mode != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_VIDEO_CDS_MODE, video_prop);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed to update table with CDS mode");
+ rc = BAD_VALUE;
+ } else {
+ LOGD("Set CDS in video mode from setprop = %d", cds_mode);
+ mCds_mode = cds_mode;
+ }
+ } else {
+ LOGE("Invalid prop for video CDS MODE %d", cds_mode);
+ rc = BAD_VALUE;
+ }
+ }
+ } else {
+ if (str) {
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+ str);
+ if (cds_mode != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_CDS_MODE, str);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed to update table with CDS mode");
+ rc = BAD_VALUE;
+ } else {
+ LOGD("Set CDS in capture mode = %d", cds_mode);
+ mCds_mode = cds_mode;
+ m_bNeedRestart = true;
+ }
+ } else {
+ LOGE("Invalid argument for snapshot CDS MODE %d", cds_mode);
+ rc = BAD_VALUE;
+ }
+ }
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.CDS", prop, CDS_MODE_ON);
+ int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+ prop);
+ if (cds_mode != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_CDS_MODE, prop);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed to update table with CDS mode");
+ rc = BAD_VALUE;
+ } else {
+ LOGD("Set CDS in snapshot mode from setprop = %d", cds_mode);
+ mCds_mode = cds_mode;
+ }
+ } else {
+ LOGE("Invalid prop for snapshot CDS MODE %d", cds_mode);
+ rc = BAD_VALUE;
+ }
+ }
+ }
+
+ return rc;
+}
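+
+// For reference (inferred from the property reads above; accepted strings are
+// defined by CDS_MODES_MAP): when the app passes no CDS key, the defaults can
+// be steered via setprop, e.g.
+//   adb shell setprop persist.camera.CDS off
+//   adb shell setprop persist.camera.video.CDS off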
+
+/*===========================================================================
+ * FUNCTION : setInitialExposureIndex
+ *
+ * DESCRIPTION: Set initial exposure index value
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInitialExposureIndex(const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+ int value = -1;
+ const char *str = params.get(KEY_QC_INITIAL_EXPOSURE_INDEX);
+ const char *prev_str = get(KEY_QC_INITIAL_EXPOSURE_INDEX);
+ if (str) {
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ value = atoi(str);
+ LOGD("Set initial exposure index value from param = %d", value);
+ if (value >= 0) {
+ updateParamEntry(KEY_QC_INITIAL_EXPOSURE_INDEX, str);
+ }
+ }
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.initial.exp.val", prop, "");
+ if ((strlen(prop) > 0) &&
+ ( (prev_str == NULL) || (strcmp(prop, prev_str) != 0))) {
+ value = atoi(prop);
+ LOGD("Set initial exposure index value from setprop = %d", value);
+ if (value >= 0) {
+ updateParamEntry(KEY_QC_INITIAL_EXPOSURE_INDEX, prop);
+ }
+ }
+ }
+
+ if (value >= 0) {
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX, (uint32_t)value)) {
+ LOGE("Failed to update initial exposure index value");
+ rc = BAD_VALUE;
+ }
+ } else {
+ LOGD("Invalid value for initial exposure index value %d", value);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setInstantCapture
+ *
+ * DESCRIPTION: Set Instant Capture related params
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantCapture(const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+ int value = -1;
+ // Check for instant capture, this will enable instant AEC as well.
+ // This param will trigger the instant AEC param to backend
+ // And also will be useful for instant capture.
+ const char *str = params.get(KEY_QC_INSTANT_CAPTURE);
+ const char *prev_str = get(KEY_QC_INSTANT_CAPTURE);
+ if (str) {
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), str);
+ LOGD("Set instant Capture from param = %d", value);
+ if(value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_INSTANT_CAPTURE, str);
+ }
+ }
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.instant.capture", prop, KEY_QC_INSTANT_CAPTURE_DISABLE);
+ if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+ value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), prop);
+ LOGD("Set instant capture from setprop = %d", value);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_INSTANT_CAPTURE, prop);
+ }
+ }
+ }
+
+ // Set instant AEC param to the backend for either instant capture or instant AEC
+ // 0 - disable (normal AEC)
+ // 1 - Aggressive AEC (algo used in backend)
+ // 2 - Fast AEC (algo used in backend)
+ if (value != NAME_NOT_FOUND && value != -1) {
+ m_bInstantCapture = (value > 0)? true : false;
+ setInstantAEC((uint8_t)value, false);
+ }
+
+
+ // get frame aec bound value from setprop.
+ // This value indicates the number of frames, camera interface
+ // will wait for getting the instant capture frame.
+ // Default value set to 7.
+ // This value also indicates the number of frames, that HAL
+ // will not display and will not send preview frames to app
+ // This will be applicable only if instant capture is set.
+ if (m_bInstantCapture) {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.ae.capture.bound", prop, "7");
+ int32_t frame_bound = atoi(prop);
+ if (frame_bound >= 0) {
+ mAecFrameBound = (uint8_t)frame_bound;
+ } else {
+ LOGE("Invalid prop for aec frame bound %d", frame_bound);
+ rc = BAD_VALUE;
+ }
+ }
+ return rc;
+}
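+
+// For reference (inferred from the property reads above; accepted values are
+// defined by INSTANT_CAPTURE_MODES_MAP): instant capture can also be driven
+// via setprop, e.g.
+//   adb shell setprop persist.camera.instant.capture 1
+//   adb shell setprop persist.camera.ae.capture.bound 10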
+
+/*===========================================================================
+ * FUNCTION : setInstantAEC
+ *
+ * DESCRIPTION: Set Instant AEC related params
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+ int value = -1;
+
+ // Check for instant AEC only when instant capture is not enabled.
+ // Instant capture already takes care of the instant AEC as well.
+ if (!m_bInstantCapture) {
+ // Check for instant AEC. Instant AEC will only enable fast AEC.
+ // It will not enable instant capture.
+ // This param will trigger the instant AEC param to backend
+ const char *str = params.get(KEY_QC_INSTANT_AEC);
+ const char *prev_str = get(KEY_QC_INSTANT_AEC);
+ if (str) {
+ if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+ value = lookupAttr(INSTANT_AEC_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), str);
+ LOGD("Set instant AEC from param = %d", value);
+ }
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.instant.aec", prop, KEY_QC_INSTANT_AEC_DISABLE);
+ if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+ value = lookupAttr(INSTANT_AEC_MODES_MAP,
+ PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), prop);
+ LOGD("Set instant AEC from setprop = %d", value);
+ }
+ }
+
+ // Set instant AEC param to the backend for either instant capture or instant AEC
+ // 0 - disable (normal AEC)
+ // 1 - Aggressive AEC (algo used in backend)
+ // 2 - Fast AEC (algo used in backend)
+ if (value != NAME_NOT_FOUND && value != -1) {
+ setInstantAEC((uint8_t)value, false);
+ }
+
+ }
+
+ // get frame aec preview skip count from setprop.
+ // This value indicates the number of frames, that HAL
+ // will not display and will not send preview frames to app
+ // Default value set to 7.
+ // This will be applicable only if instant aec is set.
+ if (m_bInstantAEC) {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.ae.instant.bound", prop, "7");
+ int32_t aec_frame_skip_cnt = atoi(prop);
+ if (aec_frame_skip_cnt >= 0) {
+ mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
+ } else {
+ LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
+ rc = BAD_VALUE;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setDISValue
+ *
+ * DESCRIPTION: set DIS value
+ *
+ * PARAMETERS :
+ * @disStr : DIS value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const char *disStr)
+{
+ if (disStr != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), disStr);
+ if (value != NAME_NOT_FOUND) {
+ //For some IS types (like EIS 2.0), when DIS value is changed, we need to restart
+ //preview because of topology change in backend. But, for now, restart preview
+ //for all IS types.
+ m_bNeedRestart = true;
+ LOGH("Setting DIS value %s", disStr);
+ updateParamEntry(KEY_QC_DIS, disStr);
+ if (!(strcmp(disStr,"enable"))) {
+ m_bDISEnabled = true;
+ } else {
+ m_bDISEnabled = false;
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_DIS_ENABLE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid DIS value: %s", (disStr == NULL) ? "NULL" : disStr);
+ m_bDISEnabled = false;
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : updateOisValue
+ *
+ * DESCRIPTION: update OIS value
+ *
+ * PARAMETERS :
+ * @oisValue : OIS value TRUE/FALSE
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateOisValue(bool oisValue)
+{
+ uint8_t enable = 0;
+ int32_t rc = NO_ERROR;
+
+ // Check for OIS disable
+ char ois_prop[PROPERTY_VALUE_MAX];
+ memset(ois_prop, 0, sizeof(ois_prop));
+ property_get("persist.camera.ois.disable", ois_prop, "0");
+ uint8_t ois_disable = (uint8_t)atoi(ois_prop);
+
+ //Enable OIS in camera mode or camcorder 4K mode
+ if (!m_bRecordingHint || (is4k2kVideoResolution() && m_bRecordingHint)) {
+ enable = 1;
+ LOGH("Valid OIS mode!! ");
+ }
+ // Disable OIS if setprop is set
+ if (ois_disable || !oisValue) {
+ //Disable OIS
+ enable = 0;
+ LOGH("Disable OIS mode!! ois_disable(%d) oisValue(%d)",
+ ois_disable, oisValue);
+
+ }
+ m_bOISEnabled = enable;
+ if (m_bOISEnabled) {
+ updateParamEntry(KEY_QC_OIS, VALUE_ENABLE);
+ } else {
+ updateParamEntry(KEY_QC_OIS, VALUE_DISABLE);
+ }
+
+ if (initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ LOGH("Sending OIS mode (%d)", enable);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_META_LENS_OPT_STAB_MODE, enable)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to parameter changes");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setHighFrameRate
+ *
+ * DESCRIPTION: set high frame rate
+ *
+ * PARAMETERS :
+ * @hfrMode : HFR mode
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHighFrameRate(const int32_t hfrMode)
+{
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HFR, hfrMode)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value
+ *
+ * PARAMETERS :
+ * @lensShadeStr : lens shade value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const char *lensShadeStr)
+{
+ if (lensShadeStr != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), lensShadeStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting LensShade value %s", lensShadeStr);
+ updateParamEntry(KEY_QC_LENSSHADE, lensShadeStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ROLLOFF, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid LensShade value: %s",
+ (lensShadeStr == NULL) ? "NULL" : lensShadeStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value
+ *
+ * PARAMETERS :
+ * @expComp : exposure compensation value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(int expComp)
+{
+ char val[16];
+ snprintf(val, sizeof(val), "%d", expComp);
+ updateParamEntry(KEY_EXPOSURE_COMPENSATION, val);
+
+ // Don't need to pass step as part of setParameter because
+ // camera daemon is already aware of it.
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EXPOSURE_COMPENSATION, expComp)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance mode
+ *
+ * PARAMETERS :
+ * @wbStr : white balance mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const char *wbStr)
+{
+ if (wbStr != NULL) {
+ int32_t value = lookupAttr(WHITE_BALANCE_MODES_MAP,
+ PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP), wbStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting WhiteBalance value %s", wbStr);
+ updateParamEntry(KEY_WHITE_BALANCE, wbStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WHITE_BALANCE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid WhiteBalance value: %s", (wbStr == NULL) ? "NULL" : wbStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setWBManualCCT
+ *
+ * DESCRIPTION: set manual white balance CCT value
+ *
+ * PARAMETERS :
+ * @cctStr : string of wb cct, range (2000, 8000) in K.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWBManualCCT(const char *cctStr)
+{
+ if (cctStr != NULL) {
+ int32_t cctVal = atoi(cctStr);
+ int32_t minCct = m_pCapability->min_wb_cct; /* 2000K */
+ int32_t maxCct = m_pCapability->max_wb_cct; /* 8000K */
+
+ if (cctVal >= minCct && cctVal <= maxCct) {
+ LOGH(", cct value: %d", cctVal);
+ updateParamEntry(KEY_QC_WB_MANUAL_CCT, cctStr);
+ cam_manual_wb_parm_t manual_wb;
+ manual_wb.type = CAM_MANUAL_WB_MODE_CCT;
+ manual_wb.cct = cctVal;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WB_MANUAL, manual_wb)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+
+ LOGE("Invalid cct, value: %s",
+ (cctStr == NULL) ? "NULL" : cctStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : updateAWBParams
+ *
+ * DESCRIPTION: update the cached AWB parameters (CCT and WB gains)
+ *
+ * PARAMETERS :
+ * @awb_params : WB parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateAWBParams(cam_awb_params_t &awb_params)
+{
+ //check and update CCT
+ int32_t prev_cct = getInt(KEY_QC_WB_MANUAL_CCT);
+ if (prev_cct != awb_params.cct_value) {
+ LOGD("update current cct value. old:%d, now:%d",
+ prev_cct, awb_params.cct_value);
+ set(KEY_QC_WB_MANUAL_CCT, awb_params.cct_value);
+ }
+
+ //check and update WB gains
+ const char *prev_gains = get(KEY_QC_MANUAL_WB_GAINS);
+ char gainStr[30];
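+ // Publish the gains in the same "R,G,B" string format that setManualWBGains()/parseGains() expect.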
+ snprintf(gainStr, sizeof(gainStr), "%f,%f,%f", awb_params.rgb_gains.r_gain,
+ awb_params.rgb_gains.g_gain, awb_params.rgb_gains.b_gain);
+
+ if (prev_gains == NULL || strcmp(prev_gains, gainStr)) {
+ set(KEY_QC_MANUAL_WB_GAINS, gainStr);
+ LOGD("update currernt RGB gains: old %s new %s", prev_gains, gainStr);
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : parseGains
+ *
+ * DESCRIPTION: parse WB gains
+ *
+ * PARAMETERS :
+ * @gainStr : WB result string
+ * @r_gain : WB red gain
+ * @g_gain : WB green gain
+ * @b_gain : WB blue gain
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseGains(const char *gainStr, double &r_gain,
+ double &g_gain, double &b_gain)
+{
+ int32_t rc = NO_ERROR;
+ char *saveptr = NULL;
+ size_t gains_size = strlen(gainStr) + 1;
+ char* gains = (char*) calloc(1, gains_size);
+ if (NULL == gains) {
+ LOGE("No memory for gains");
+ return NO_MEMORY;
+ }
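+ // gainStr is expected in "R,G,B" form (e.g. "1.8,1.0,2.2"); tokens are parsed in that order.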
+ strlcpy(gains, gainStr, gains_size);
+ char *token = strtok_r(gains, ",", &saveptr);
+
+ if (NULL != token) {
+ r_gain = (float) atof(token);
+ token = strtok_r(NULL, ",", &saveptr);
+ }
+
+ if (NULL != token) {
+ g_gain = (float) atof(token);
+ token = strtok_r(NULL, ",", &saveptr);
+ }
+
+ if (NULL != token) {
+ b_gain = (float) atof(token);
+ } else {
+ LOGE("Malformed string for gains");
+ rc = BAD_VALUE;
+ }
+
+ free(gains);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setManualWBGains
+ *
+ * DESCRIPTION: set manual wb gains for r,g,b
+ *
+ * PARAMETERS :
+ * @gainStr : string of wb gains, range (1.0, 4.0).
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setManualWBGains(const char *gainStr)
+{
+ int32_t rc = NO_ERROR;
+ if (gainStr != NULL) {
+ double r_gain,g_gain,b_gain;
+ rc = parseGains(gainStr, r_gain, g_gain, b_gain);
+ if (rc != NO_ERROR) {
+ return rc;
+ }
+
+ double minGain = m_pCapability->min_wb_gain;
+ double maxGain = m_pCapability->max_wb_gain;
+
+ if (r_gain >= minGain && r_gain <= maxGain &&
+ g_gain >= minGain && g_gain <= maxGain &&
+ b_gain >= minGain && b_gain <= maxGain) {
+ LOGH(", setting rgb gains: r = %lf g = %lf b = %lf",
+ r_gain, g_gain, b_gain);
+ updateParamEntry(KEY_QC_MANUAL_WB_GAINS, gainStr);
+ cam_manual_wb_parm_t manual_wb;
+ manual_wb.type = CAM_MANUAL_WB_MODE_GAIN;
+ manual_wb.gains.r_gain = r_gain;
+ manual_wb.gains.g_gain = g_gain;
+ manual_wb.gains.b_gain = b_gain;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WB_MANUAL, manual_wb)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+
+ LOGH("Invalid manual wb gains: %s",
+ (gainStr == NULL) ? "NULL" : gainStr);
+ return BAD_VALUE;
+}
+
+int QCameraParameters::getAutoFlickerMode()
+{
+ /* Enable Advanced Auto Antibanding, where we can set
+ any of the following options:
+ i.e. CAM_ANTIBANDING_MODE_AUTO
+ CAM_ANTIBANDING_MODE_AUTO_50HZ
+ CAM_ANTIBANDING_MODE_AUTO_60HZ
+ Currently setting it to the default */
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.set.afd", prop, "3");
+ return atoi(prop);
+}
+
+/*===========================================================================
+ * FUNCTION : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value
+ *
+ * PARAMETERS :
+ * @antiBandingStr : antibanding value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const char *antiBandingStr)
+{
+ if (antiBandingStr != NULL) {
+ int32_t value = lookupAttr(ANTIBANDING_MODES_MAP, PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP),
+ antiBandingStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGH("Setting AntiBanding value %s", antiBandingStr);
+ updateParamEntry(KEY_ANTIBANDING, antiBandingStr);
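+ // For auto antibanding, let the setprop-selectable AFD algorithm (see getAutoFlickerMode) choose the actual mode.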
+ if(value == CAM_ANTIBANDING_MODE_AUTO) {
+ value = getAutoFlickerMode();
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_ANTIBANDING, (uint32_t)value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid AntiBanding value: %s",
+ (antiBandingStr == NULL) ? "NULL" : antiBandingStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas
+ *
+ * PARAMETERS :
+ * @focusAreasStr : focus areas value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const char *focusAreasStr)
+{
+ if (m_pCapability->max_num_focus_areas == 0 ||
+ focusAreasStr == NULL) {
+ LOGD("Parameter string is null");
+ return NO_ERROR;
+ }
+
+ cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+ if (NULL == areas) {
+ LOGE("No memory for areas");
+ return NO_MEMORY;
+ }
+ memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+ int num_areas_found = 0;
+ if (parseCameraAreaString(focusAreasStr,
+ m_pCapability->max_num_focus_areas,
+ areas,
+ num_areas_found) != NO_ERROR) {
+ LOGE("Failed to parse the string: %s", focusAreasStr);
+ free(areas);
+ return BAD_VALUE;
+ }
+
+ if (validateCameraAreas(areas, num_areas_found) == false) {
+ LOGE("invalid areas specified : %s", focusAreasStr);
+ free(areas);
+ return BAD_VALUE;
+ }
+
+ updateParamEntry(KEY_FOCUS_AREAS, focusAreasStr);
+
+ //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+ //so no action is taken by the lower layer
+ if (num_areas_found == 1 &&
+ areas[0].rect.left == 0 &&
+ areas[0].rect.top == 0 &&
+ areas[0].rect.width == 0 &&
+ areas[0].rect.height == 0 &&
+ areas[0].weight == 0) {
+ num_areas_found = 0;
+ }
+
+ int previewWidth, previewHeight;
+ getPreviewSize(&previewWidth, &previewHeight);
+ cam_roi_info_t af_roi_value;
+ memset(&af_roi_value, 0, sizeof(cam_roi_info_t));
+ af_roi_value.num_roi = (uint8_t)num_areas_found;
+ for (int i = 0; i < num_areas_found; i++) {
+ LOGH("FocusArea[%d] = (%d, %d, %d, %d)",
+ i, (areas[i].rect.top), (areas[i].rect.left),
+ (areas[i].rect.width), (areas[i].rect.height));
+
+ // Transform the coords from (-1000, 1000)
+ // to (0, previewWidth or previewHeight).
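+ // Example: with a 1280-wide preview, left = -1000 maps to 0, left = 0 to 640, and left = +1000 to 1280.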
+ af_roi_value.roi[i].left =
+ (int32_t)(((double)areas[i].rect.left + 1000.0) *
+ ((double)previewWidth / 2000.0));
+ af_roi_value.roi[i].top =
+ (int32_t)(((double)areas[i].rect.top + 1000.0) *
+ ((double)previewHeight / 2000.0));
+ af_roi_value.roi[i].width =
+ (int32_t)((double)areas[i].rect.width *
+ (double)previewWidth / 2000.0);
+ af_roi_value.roi[i].height =
+ (int32_t)((double)areas[i].rect.height *
+ (double)previewHeight / 2000.0);
+ af_roi_value.weight[i] = areas[i].weight;
+ }
+ free(areas);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AF_ROI, af_roi_value)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas value
+ *
+ * PARAMETERS :
+ * @meteringAreasStr : metering areas value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const char *meteringAreasStr)
+{
+ if (m_pCapability->max_num_metering_areas == 0 ||
+ meteringAreasStr == NULL) {
+ LOGD("Parameter string is null");
+ return NO_ERROR;
+ }
+
+ cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+ if (NULL == areas) {
+ LOGE("No memory for areas");
+ return NO_MEMORY;
+ }
+ memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+ int num_areas_found = 0;
+ if (parseCameraAreaString(meteringAreasStr,
+ m_pCapability->max_num_metering_areas,
+ areas,
+ num_areas_found) < 0) {
+ LOGE("Failed to parse the string: %s", meteringAreasStr);
+ free(areas);
+ return BAD_VALUE;
+ }
+
+ if (validateCameraAreas(areas, num_areas_found) == false) {
+ LOGE("invalid areas specified : %s", meteringAreasStr);
+ free(areas);
+ return BAD_VALUE;
+ }
+
+ updateParamEntry(KEY_METERING_AREAS, meteringAreasStr);
+
+ //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+ //so no action is taken by the lower layer
+ if (num_areas_found == 1 &&
+ areas[0].rect.left == 0 &&
+ areas[0].rect.top == 0 &&
+ areas[0].rect.width == 0 &&
+ areas[0].rect.height == 0 &&
+ areas[0].weight == 0) {
+ num_areas_found = 0;
+ }
+ cam_set_aec_roi_t aec_roi_value;
+ int previewWidth, previewHeight;
+ getPreviewSize(&previewWidth, &previewHeight);
+
+ memset(&aec_roi_value, 0, sizeof(cam_set_aec_roi_t));
+ if (num_areas_found > 0) {
+ aec_roi_value.aec_roi_enable = CAM_AEC_ROI_ON;
+ aec_roi_value.aec_roi_type = CAM_AEC_ROI_BY_COORDINATE;
+
+ for (int i = 0; i < num_areas_found; i++) {
+ LOGH("MeteringArea[%d] = (%d, %d, %d, %d)",
+ i, (areas[i].rect.top), (areas[i].rect.left),
+ (areas[i].rect.width), (areas[i].rect.height));
+
+ // Transform the coords from (-1000, 1000) to
+ // (0, previewWidth or previewHeight).
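+ // Example: a full-frame area (left/top = -1000, width/height = 2000) has its center (0, 0) mapped to (previewWidth/2, previewHeight/2).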
+ aec_roi_value.cam_aec_roi_position.coordinate[i].x =
+ (uint32_t)((((double)areas[i].rect.left +
+ (double)areas[i].rect.width / 2.0) + 1000.0) *
+ (double)previewWidth / 2000.0);
+ aec_roi_value.cam_aec_roi_position.coordinate[i].y =
+ (uint32_t)((((double)areas[i].rect.top +
+ (double)areas[i].rect.height / 2.0) + 1000.0) *
+ (double)previewHeight / 2000.0);
+ }
+ } else {
+ aec_roi_value.aec_roi_enable = CAM_AEC_ROI_OFF;
+ }
+ free(areas);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_ROI, aec_roi_value)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : isSupportedSensorHdrSize
+ *
+ * DESCRIPTION: Checks if the requested snapshot size is compatible with the
+ * currently configured HDR mode. The primary target for validation is
+ * zzhdr; however, this function can be extended in the future to vet
+ * all sensor-based HDR configs
+ *
+ * PARAMETERS :
+ * @params : CameraParameters object
+ *
+ * RETURN : boolean type
+ * True -- indicates supported config
+ * False -- indicates unsupported config; the caller should fall back to
+ * other available HDR modes
+ *==========================================================================*/
+bool QCameraParameters::isSupportedSensorHdrSize(const QCameraParameters& params)
+{
+ char value[PROPERTY_VALUE_MAX];
+ memset(value, 0, sizeof(value));
+ property_get("persist.camera.zzhdr.enable", value, "0");
+ uint8_t zzhdr_enable = (uint8_t)atoi(value);
+
+ if (zzhdr_enable) {
+
+ int req_w, req_h;
+ params.getPictureSize(&req_w, &req_h);
+
+ // Check if requested w x h is in zzhdr supported list
+ for (size_t i = 0; i< m_pCapability->zzhdr_sizes_tbl_cnt; ++i) {
+
+ if (req_w == m_pCapability->zzhdr_sizes_tbl[i].width &&
+ req_h == m_pCapability->zzhdr_sizes_tbl[i].height) {
+ LOGD("%s: Found match for %d x %d", __func__, req_w, req_h);
+ return true;
+ }
+ }
+ LOGH("%s: %d x %d is not supported for zzhdr mode", __func__, req_w, req_h);
+ return false;
+ }
+
+ return true;
+}
+
+/*===========================================================================
+ * FUNCTION : setSceneMode
+ *
+ * DESCRIPTION: set scene mode
+ *
+ * PARAMETERS :
+ * @sceneModeStr : scene mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const char *sceneModeStr)
+{
+ if (sceneModeStr != NULL) {
+ int32_t value = lookupAttr(SCENE_MODES_MAP, PARAM_MAP_SIZE(SCENE_MODES_MAP), sceneModeStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGD("Setting SceneMode %s", sceneModeStr);
+ updateParamEntry(KEY_SCENE_MODE, sceneModeStr);
+ if (m_bSensorHDREnabled) {
+ // In case of HW HDR mode, we do not update it as the best shot mode.
+ LOGH("H/W HDR mode enabled. Do not set Best Shot Mode");
+ return NO_ERROR;
+ }
+ if (m_bSceneSelection) {
+ setSelectedScene((cam_scene_mode_type) value);
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BESTSHOT_MODE,
+ (uint32_t)value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Secene Mode: %s",
+ (sceneModeStr == NULL) ? "NULL" : sceneModeStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone AF algorithm
+ *
+ * PARAMETERS :
+ * @selZoneAFStr : selectable zone AF algorithm value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const char *selZoneAFStr)
+{
+ if (selZoneAFStr != NULL) {
+ int32_t value = lookupAttr(FOCUS_ALGO_MAP, PARAM_MAP_SIZE(FOCUS_ALGO_MAP), selZoneAFStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGD("Setting Selectable Zone AF value %s", selZoneAFStr);
+ updateParamEntry(KEY_QC_SELECTABLE_ZONE_AF, selZoneAFStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_ALGO_TYPE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid selectable zone af value: %s",
+ (selZoneAFStr == NULL) ? "NULL" : selZoneAFStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : isAEBracketEnabled
+ *
+ * DESCRIPTION: checks if AE bracketing is enabled
+ *
+ * PARAMETERS :
+ *
+ * RETURN : TRUE/FALSE
+ *==========================================================================*/
+bool QCameraParameters::isAEBracketEnabled()
+{
+ const char *str = get(KEY_QC_AE_BRACKET_HDR);
+ if (str != NULL) {
+ if (strcmp(str, AE_BRACKET_OFF) != 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket value
+ *
+ * PARAMETERS :
+ * @aecBracketStr : AE bracket value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const char *aecBracketStr)
+{
+ if (aecBracketStr == NULL) {
+ LOGD("setAEBracket with NULL value");
+ return NO_ERROR;
+ }
+
+ cam_exp_bracketing_t expBracket;
+ memset(&expBracket, 0, sizeof(expBracket));
+
+ int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+ aecBracketStr);
+ switch (value) {
+ case CAM_EXP_BRACKETING_ON:
+ {
+ LOGD("EXP_BRACKETING_ON");
+ const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+ if ((str_val != NULL) && (strlen(str_val)>0)) {
+ expBracket.mode = CAM_EXP_BRACKETING_ON;
+ m_bAeBracketingEnabled = true;
+ strlcpy(expBracket.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+ LOGD("setting Exposure Bracketing value of %s",
+ expBracket.values);
+ }
+ else {
+ /* App has not set capture-burst-exposures; in this error case fall back to bracketing-off mode */
+ LOGD("capture-burst-exposures not set, back to HDR OFF mode");
+ m_bAeBracketingEnabled = false;
+ expBracket.mode = CAM_EXP_BRACKETING_OFF;
+ }
+ }
+ break;
+ default:
+ {
+ m_bAeBracketingEnabled = false;
+ LOGH(", EXP_BRACKETING_OFF");
+ expBracket.mode = CAM_EXP_BRACKETING_OFF;
+ }
+ break;
+ }
+
+ // Cache client AE bracketing configuration
+ memcpy(&m_AEBracketingClient, &expBracket, sizeof(cam_exp_bracketing_t));
+
+ /* save the value*/
+ updateParamEntry(KEY_QC_AE_BRACKET_HDR, aecBracketStr);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : set3ALock
+ *
+ * DESCRIPTION: enable/disable 3A lock.
+ *
+ * PARAMETERS :
+ * @lock3A : lock or unlock
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::set3ALock(bool lock3A)
+{
+ int32_t rc = NO_ERROR;
+ LOGH("Setting Lock %d", lock3A);
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ uint32_t focus_mode = CAM_FOCUS_MODE_AUTO;
+ if (lock3A) {
+ if (isUbiFocusEnabled() || isUbiRefocus()) {
+ //For Ubi focus move focus to infinity.
+ focus_mode = CAM_FOCUS_MODE_INFINITY;
+ } else if (isOptiZoomEnabled() || isStillMoreEnabled()) {
+ //For optizoom and stillmore, set focus as fixed.
+ focus_mode = CAM_FOCUS_MODE_FIXED;
+ }
+ } else {
+ // retrieve previous focus value.
+ const char *focus = get(KEY_FOCUS_MODE);
+ int val = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focus);
+ if (val != NAME_NOT_FOUND) {
+ focus_mode = (uint32_t) val;
+ LOGD("focus mode %s", focus);
+ }
+ }
+ //Lock AWB
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AWB_LOCK, (uint32_t)lock3A)) {
+ return BAD_VALUE;
+ }
+ //Lock AEC
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_LOCK, (uint32_t)lock3A)) {
+ return BAD_VALUE;
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_MODE, focus_mode)) {
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit batch");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setAndCommitZoom
+ *
+ * DESCRIPTION: set zoom.
+ *
+ * PARAMETERS :
+ * @zoom_level : zoom level to set.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAndCommitZoom(int zoom_level)
+{
+ LOGH("E");
+ int32_t rc = NO_ERROR;
+ if (initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZOOM, zoom_level)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set Flash value");
+ }
+
+ mZoomLevel = zoom_level;
+ LOGH("X");
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : isOptiZoomEnabled
+ *
+ * DESCRIPTION: checks whether optizoom is enabled
+ *
+ * PARAMETERS :
+ *
+ * RETURN : true - enabled, false - disabled
+ *
+ *==========================================================================*/
+bool QCameraParameters::isOptiZoomEnabled()
+{
+ if (m_bOptiZoomOn && (0 <= mParmZoomLevel)) {
+ uint32_t zoom_level = (uint32_t) mParmZoomLevel;
+ cam_opti_zoom_t *opti_zoom_settings_need =
+ &(m_pCapability->opti_zoom_settings_need);
+ uint32_t zoom_threshold = (uint32_t) opti_zoom_settings_need->zoom_threshold;
+ LOGH("current zoom level =%u & zoom_threshold =%u",
+ zoom_level, zoom_threshold);
+
+ if (zoom_level >= zoom_threshold) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : setNoiseReductionMode
+ *
+ * DESCRIPTION: set noise reduction mode
+ *
+ * PARAMETERS :
+ * @noiseReductionModeStr : noise reduction mode
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoiseReductionMode(const char *noiseReductionModeStr)
+{
+ LOGH("noiseReductionModeStr = %s", noiseReductionModeStr);
+ if (noiseReductionModeStr != NULL) {
+ int value = lookupAttr(NOISE_REDUCTION_MODES_MAP, PARAM_MAP_SIZE(NOISE_REDUCTION_MODES_MAP),
+ noiseReductionModeStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bHighQualityNoiseReductionMode =
+ !strncmp(VALUE_HIGH_QUALITY, noiseReductionModeStr, strlen(VALUE_HIGH_QUALITY));
+ updateParamEntry(KEY_QC_NOISE_REDUCTION_MODE, noiseReductionModeStr);
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid noise reduction mode value: %s",
+ (noiseReductionModeStr == NULL) ? "NULL" : noiseReductionModeStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : commitAFBracket
+ *
+ * DESCRIPTION: commit AF Bracket.
+ *
+ * PARAMETERS :
+ * @AFBracket : AF bracketing configuration
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitAFBracket(cam_af_bracketing_t afBracket)
+{
+
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_BRACKETING, afBracket)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit batch");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setAFBracket
+ *
+ * DESCRIPTION: set AF bracket value
+ *
+ * PARAMETERS :
+ * @afBracketStr : AF bracket value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const char *afBracketStr)
+{
+ LOGH("afBracketStr =%s",afBracketStr);
+
+ if(afBracketStr != NULL) {
+ int value = lookupAttr(AF_BRACKETING_MODES_MAP, PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP),
+ afBracketStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bAFBracketingOn = (value != 0);
+ updateParamEntry(KEY_QC_AF_BRACKET, afBracketStr);
+
+ return NO_ERROR;
+ }
+ }
+
+ LOGE("Invalid af bracket value: %s",
+ (afBracketStr == NULL) ? "NULL" : afBracketStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setReFocus
+ *
+ * DESCRIPTION: set refocus value
+ *
+ * PARAMETERS :
+ * @reFocusStr : refocus value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setReFocus(const char *reFocusStr)
+{
+ LOGH("reFocusStr =%s",reFocusStr);
+
+ if (reFocusStr != NULL) {
+ int value = lookupAttr(RE_FOCUS_MODES_MAP, PARAM_MAP_SIZE(RE_FOCUS_MODES_MAP),
+ reFocusStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bReFocusOn = (value != 0);
+ updateParamEntry(KEY_QC_RE_FOCUS, reFocusStr);
+ return NO_ERROR;
+ }
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setChromaFlash
+ *
+ * DESCRIPTION: set chroma flash value
+ *
+ * PARAMETERS :
+ * @chromaFlashStr : chroma flash value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const char *chromaFlashStr)
+{
+ LOGH("chromaFlashStr =%s",chromaFlashStr);
+ if(chromaFlashStr != NULL) {
+ int value = lookupAttr(CHROMA_FLASH_MODES_MAP, PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP),
+ chromaFlashStr);
+ if(value != NAME_NOT_FOUND) {
+ m_bChromaFlashOn = (value != 0);
+ updateParamEntry(KEY_QC_CHROMA_FLASH, chromaFlashStr);
+
+ return NO_ERROR;
+ }
+ }
+
+ LOGE("Invalid chroma flash value: %s",
+ (chromaFlashStr == NULL) ? "NULL" : chromaFlashStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setOptiZoom
+ *
+ * DESCRIPTION: set opti zoom value
+ *
+ * PARAMETERS :
+ * @optiZoomStr : opti zoom value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const char *optiZoomStr)
+{
+ LOGH("optiZoomStr =%s",optiZoomStr);
+ if(optiZoomStr != NULL) {
+ int value = lookupAttr(OPTI_ZOOM_MODES_MAP, PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP),
+ optiZoomStr);
+ if(value != NAME_NOT_FOUND) {
+ m_bOptiZoomOn = (value != 0);
+ updateParamEntry(KEY_QC_OPTI_ZOOM, optiZoomStr);
+
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid opti zoom value: %s",
+ (optiZoomStr == NULL) ? "NULL" : optiZoomStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait value
+ *
+ * PARAMETERS :
+ * @truePortraitStr : true portrait value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const char *truePortraitStr)
+{
+ LOGH("truePortraitStr =%s", truePortraitStr);
+ if (truePortraitStr != NULL) {
+ int value = lookupAttr(TRUE_PORTRAIT_MODES_MAP,
+ PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP),
+ truePortraitStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bTruePortraitOn = (value != 0);
+ updateParamEntry(KEY_QC_TRUE_PORTRAIT, truePortraitStr);
+ setFaceDetection(m_bFaceDetectionOn, false);
+ return NO_ERROR;
+ }
+ }
+ LOGH("Invalid true portrait value: %s",
+ (truePortraitStr == NULL) ? "NULL" : truePortraitStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRMode
+ *
+ * DESCRIPTION: set hdr mode value
+ *
+ * PARAMETERS :
+ * @hdrModeStr : hdr mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRMode(const char *hdrModeStr)
+{
+ LOGH("hdrModeStr =%s", hdrModeStr);
+ if (hdrModeStr != NULL) {
+ int value = lookupAttr(HDR_MODES_MAP, PARAM_MAP_SIZE(HDR_MODES_MAP), hdrModeStr);
+ if (value != NAME_NOT_FOUND) {
+ const char *str = get(KEY_SCENE_MODE);
+
+ m_bHDRModeSensor = !strncmp(hdrModeStr, HDR_MODE_SENSOR, strlen(HDR_MODE_SENSOR));
+
+ updateParamEntry(KEY_QC_HDR_MODE, hdrModeStr);
+
+ // If hdr is already selected, need to deselect it in local cache
+ // So the new hdr mode will be applied
+ if (str && !strncmp(str, SCENE_MODE_HDR, strlen(SCENE_MODE_HDR))) {
+ updateParamEntry(KEY_SCENE_MODE, SCENE_MODE_AUTO);
+ m_bNeedRestart = true;
+ }
+
+ return NO_ERROR;
+ }
+ }
+ LOGH("Invalid hdr mode value: %s",
+ (hdrModeStr == NULL) ? "NULL" : hdrModeStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setSeeMore
+ *
+ * DESCRIPTION: set see more value
+ *
+ * PARAMETERS :
+ * @seeMoreStr : see more value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const char *seeMoreStr)
+{
+ int32_t rc = NO_ERROR;
+
+ LOGH("seeMoreStr =%s", seeMoreStr);
+ if (seeMoreStr != NULL) {
+ int value = lookupAttr(ON_OFF_MODES_MAP,
+ PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+ seeMoreStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bSeeMoreOn = (value != 0);
+
+ // If SeeMore is enabled, enable StillMore for live snapshot
+ // and disable tone map
+ if (m_bSeeMoreOn) {
+ m_bStillMoreOn = TRUE;
+ if (!m_bLtmForSeeMoreEnabled) {
+ rc = setToneMapMode(false, false);
+ }
+ if (rc != NO_ERROR) {
+ LOGH("Failed to disable tone map during SeeMore");
+ }
+ } else {
+ m_bStillMoreOn = FALSE;
+ if (!m_bLtmForSeeMoreEnabled) {
+ rc = setToneMapMode(true, false);
+ }
+ if (rc != NO_ERROR) {
+ LOGH("Failed to enable tone map during SeeMore");
+ }
+ }
+ updateParamEntry(KEY_QC_SEE_MORE, seeMoreStr);
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid see more value: %s",
+ (seeMoreStr == NULL) ? "NULL" : seeMoreStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setStillMore
+ *
+ * DESCRIPTION: set still more value
+ *
+ * PARAMETERS :
+ * @stillMoreStr : still more value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStillMore(const char *stillMoreStr)
+{
+ LOGH("stillMoreStr =%s", stillMoreStr);
+ if (stillMoreStr != NULL) {
+ int value = lookupAttr(STILL_MORE_MODES_MAP, PARAM_MAP_SIZE(STILL_MORE_MODES_MAP),
+ stillMoreStr);
+ if (value != NAME_NOT_FOUND) {
+ m_bStillMoreOn = (value != 0);
+ updateParamEntry(KEY_QC_STILL_MORE, stillMoreStr);
+
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid still more value: %s",
+ (stillMoreStr == NULL) ? "NULL" : stillMoreStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRNeed1x
+ *
+ * DESCRIPTION: set hdr need 1x value
+ *
+ * PARAMETERS :
+ * @hdrNeed1xStr : hdr need 1x value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRNeed1x(const char *hdrNeed1xStr)
+{
+ LOGH("hdrNeed1xStr =%s", hdrNeed1xStr);
+ if (hdrNeed1xStr != NULL) {
+ int value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+ hdrNeed1xStr);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_HDR_NEED_1X, hdrNeed1xStr);
+ m_bHDR1xFrameEnabled = !strncmp(hdrNeed1xStr, VALUE_TRUE, strlen(VALUE_TRUE));
+ m_bNeedRestart = true;
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR_NEED_1X,
+ m_bHDR1xFrameEnabled)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+
+ LOGH("Invalid hdr need 1x value: %s",
+ (hdrNeed1xStr == NULL) ? "NULL" : hdrNeed1xStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setAEBracketing
+ *
+ * DESCRIPTION: enables AE bracketing
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracketing()
+{
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR, m_AEBracketingClient)) {
+ LOGE("Failed to update AE bracketing");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure AE bracketing");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRAEBracket
+ *
+ * DESCRIPTION: enables AE bracketing for HDR
+ *
+ * PARAMETERS :
+ * @hdrBracket : HDR bracketing configuration
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRAEBracket(cam_exp_bracketing_t hdrBracket)
+{
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR, hdrBracket)) {
+ LOGE("Failed to update table");
+ return BAD_TYPE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure HDR bracketing");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setCacheVideoBuffers
+ *
+ * DESCRIPTION: set cache video buffers value
+ *
+ * PARAMETERS :
+ * @cacheVideoBufStr : cache video buffer value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCacheVideoBuffers(const char *cacheVideoBufStr)
+{
+ if (cacheVideoBufStr != NULL) {
+ int8_t cacheVideoBuf = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), cacheVideoBufStr);
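+ // A non-empty persist.camera.mem.usecache setprop overrides the value requested by the app.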
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.mem.usecache", prop, "");
+ if (strlen(prop) > 0) {
+ cacheVideoBuf = atoi(prop);
+ }
+ if (cacheVideoBuf != NAME_NOT_FOUND) {
+ const char *cacheStr = (strlen(prop)>0) ? prop : cacheVideoBufStr;
+ LOGD("Setting video buffer %s",
+ (cacheVideoBuf == 0) ? "UnCached" : "Cached");
+ return updateParamEntry(KEY_QC_CACHE_VIDEO_BUFFERS, cacheStr);
+ }
+ LOGE("Cache video buffers not set correctly");
+ }
+ return BAD_VALUE;
+}
+
+
+/*===========================================================================
+ * FUNCTION : setCacheVideoBuffers
+ *
+ * DESCRIPTION: Set buffers as Cached/Uncached memory
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCacheVideoBuffers(const QCameraParameters& params)
+{
+ const char *str = params.get(KEY_QC_CACHE_VIDEO_BUFFERS);
+ const char *prev_str = get(KEY_QC_CACHE_VIDEO_BUFFERS);
+
+ if (str != NULL) {
+ if (prev_str == NULL ||
+ strcmp(str, prev_str) != 0) {
+ return setCacheVideoBuffers(str);
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : stopAEBracket
+ *
+ * DESCRIPTION: stops AE bracketing by disabling exposure bracketing (e.g. after HDR is done)
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::stopAEBracket()
+{
+ cam_exp_bracketing_t bracketing;
+
+ bracketing.mode = CAM_EXP_BRACKETING_OFF;
+
+ return setHDRAEBracket(bracketing);
+}
+
+/*===========================================================================
+ * FUNCTION : updateFlash
+ *
+ * DESCRIPTION: restores client flash configuration or disables flash
+ *
+ * PARAMETERS :
+ * @commitSettings : flag indicating whether settings need to be committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFlash(bool commitSettings)
+{
+ int32_t rc = NO_ERROR;
+ int32_t value;
+
+ if (commitSettings) {
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ }
+
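+ // Multi-frame features (HDR, AE/AF bracketing, OptiZoom, ReFocus, low light) force the flash off,
+ // chroma flash forces it on; otherwise honor the client flash setting.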
+ if (isHDREnabled() || m_bAeBracketingEnabled || m_bAFBracketingOn ||
+ m_bOptiZoomOn || m_bReFocusOn || m_LowLightLevel) {
+ value = CAM_FLASH_MODE_OFF;
+ } else if (m_bChromaFlashOn) {
+ value = CAM_FLASH_MODE_ON;
+ } else {
+ value = mFlashValue;
+ }
+
+ if (value != mFlashDaemonValue) {
+ LOGD("Setting Flash value %d", value);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LED_MODE, value)) {
+ LOGE("Failed to set led mode");
+ return BAD_VALUE;
+ }
+ mFlashDaemonValue = value;
+ } else {
+ rc = NO_ERROR;
+ }
+
+ if (commitSettings) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to configure HDR bracketing");
+ return rc;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction value
+ *
+ * PARAMETERS :
+ * @redeyeStr : red eye reduction value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const char *redeyeStr)
+{
+ if (redeyeStr != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), redeyeStr);
+ if (value != NAME_NOT_FOUND) {
+ LOGD("Setting RedEye Reduce value %s", redeyeStr);
+ updateParamEntry(KEY_QC_REDEYE_REDUCTION, redeyeStr);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_REDEYE_REDUCTION, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid RedEye Reduce value: %s",
+ (redeyeStr == NULL) ? "NULL" : redeyeStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : getDenoiseProcessPlate
+ *
+ * DESCRIPTION: query denoise process plate
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : NR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t
+ QCameraParameters::getDenoiseProcessPlate(cam_intf_parm_type_t type)
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ cam_denoise_process_type_t processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+ if (CAM_INTF_PARM_WAVELET_DENOISE == type) {
+ property_get("persist.denoise.process.plates", prop, "");
+ } else if (CAM_INTF_PARM_TEMPORAL_DENOISE == type) {
+ property_get("persist.tnr.process.plates", prop, "");
+ } else {
+ LOGW("Type not supported");
+ prop[0] = '\0';
+ }
+ if (strlen(prop) > 0) {
+ switch(atoi(prop)) {
+ case 0:
+ processPlate = CAM_WAVELET_DENOISE_YCBCR_PLANE;
+ break;
+ case 1:
+ processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+ break;
+ case 2:
+ processPlate = CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+ break;
+ case 3:
+ processPlate = CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
+ break;
+ default:
+ processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+ break;
+ }
+ }
+ return processPlate;
+}
+
+/*===========================================================================
+ * FUNCTION : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value
+ *
+ * PARAMETERS :
+ * @wnrStr : wavelet denoise value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const char *wnrStr)
+{
+ if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) == 0){
+ LOGH("WNR is not supported");
+ return NO_ERROR;
+ }
+
+ if (wnrStr != NULL) {
+ int value = lookupAttr(DENOISE_ON_OFF_MODES_MAP,
+ PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP), wnrStr);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_DENOISE, wnrStr);
+
+ cam_denoise_param_t temp;
+ memset(&temp, 0, sizeof(temp));
+ temp.denoise_enable = (uint8_t)value;
+ m_bWNROn = (value != 0);
+ if (m_bWNROn) {
+ temp.process_plates = getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+ }
+ LOGD("Denoise enable=%d, plates=%d",
+ temp.denoise_enable, temp.process_plates);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WAVELET_DENOISE, temp)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Denoise value: %s", (wnrStr == NULL) ? "NULL" : wnrStr);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setRdiMode
+ *
+ * DESCRIPTION: set rdi mode value
+ *
+ * PARAMETERS :
+ * @str : rdi mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRdiMode(const char *str)
+{
+ LOGD("RDI_DEBUG rdi mode value: %s", str);
+
+ if (str != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_RDI_MODE, str);
+ m_bRdiMode = (value == 0) ? false : true;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RDI_MODE, value)) {
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid rdi mode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+
+/*===========================================================================
+ * FUNCTION : setSecureMode
+ *
+ * DESCRIPTION: set secure mode value
+ *
+ * PARAMETERS :
+ * @str : secure mode value string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSecureMode(const char *str)
+{
+ LOGD("Secure mode value: %s", str);
+
+ if (str != NULL) {
+ int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+ PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+ if (value != NAME_NOT_FOUND) {
+ updateParamEntry(KEY_QC_SECURE_MODE, str);
+ m_bSecureMode = (value == 0)? false : true;
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Secure mode value: %s",
+ (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamRotation
+ *
+ * DESCRIPTION: get stream rotation by its type
+ *
+ * PARAMETERS :
+ * @streamType : stream type
+ * @featureConfig : stream feature config structure
+ * @dim : stream dimension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamRotation(cam_stream_type_t streamType,
+ cam_pp_feature_config_t &featureConfig,
+ cam_dimension_t &dim)
+{
+ int32_t ret = NO_ERROR;
+ const char *str = get(KEY_QC_VIDEO_ROTATION);
+ int rotationParam = lookupAttr(VIDEO_ROTATION_MODES_MAP,
+ PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+ featureConfig.rotation = ROTATE_0;
+ int swapDim = 0;
+ switch (streamType) {
+ case CAM_STREAM_TYPE_VIDEO:
+ switch(rotationParam) {
+ case 90:
+ featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+ featureConfig.rotation = ROTATE_90;
+ swapDim = 1;
+ break;
+ case 180:
+ featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+ featureConfig.rotation = ROTATE_180;
+ break;
+ case 270:
+ featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+ featureConfig.rotation = ROTATE_270;
+ swapDim = 1;
+ break;
+ default:
+ featureConfig.rotation = ROTATE_0;
+ }
+ break;
+ case CAM_STREAM_TYPE_PREVIEW:
+ case CAM_STREAM_TYPE_POSTVIEW:
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_RAW:
+ case CAM_STREAM_TYPE_METADATA:
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ case CAM_STREAM_TYPE_DEFAULT:
+ default:
+ break;
+ }
+
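+ // A 90/270 degree rotation transposes the output buffer, so swap the requested width and height.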
+ if (swapDim > 0) {
+ int w = 0;
+ w = dim.width;
+ dim.width = dim.height;
+ dim.height = w;
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamFormat
+ *
+ * DESCRIPTION: get stream format by its type
+ *
+ * PARAMETERS :
+ * @streamType : [input] stream type
+ * @format : [output] stream format
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamFormat(cam_stream_type_t streamType,
+ cam_format_t &format)
+{
+ int32_t ret = NO_ERROR;
+ format = CAM_FORMAT_MAX;
+ switch (streamType) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ if (!isUBWCEnabled()) {
+#if VENUS_PRESENT
+ cam_dimension_t preview;
+ cam_dimension_t video;
+ getStreamDimension(CAM_STREAM_TYPE_VIDEO , video);
+ getStreamDimension(CAM_STREAM_TYPE_PREVIEW, preview);
+ if (getRecordingHintValue() == true &&
+ video.width == preview.width &&
+ video.height == preview.height &&
+ mPreviewFormat == CAM_FORMAT_YUV_420_NV21) {
+ format = CAM_FORMAT_YUV_420_NV21_VENUS;
+ } else
+#endif
+ format = mPreviewFormat;
+ } else {
+ format = mPreviewFormat;
+ }
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ case CAM_STREAM_TYPE_CALLBACK:
+ format = mAppPreviewFormat;
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ cam_analysis_info_t analysisInfo;
+ cam_feature_mask_t featureMask;
+
+ featureMask = 0;
+ getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+ ret = getAnalysisInfo(
+ ((getRecordingHintValue() == true) && fdModeInVideo()),
+ FALSE,
+ featureMask,
+ &analysisInfo);
+ if (ret != NO_ERROR) {
+ LOGE("getAnalysisInfo failed, ret = %d", ret);
+ return ret;
+ }
+
+ if (analysisInfo.hw_analysis_supported &&
+ analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY) {
+ format = analysisInfo.analysis_format;
+ } else {
+ if (analysisInfo.hw_analysis_supported) {
+ LOGW("Invalid analysis_format %d\n",
+ analysisInfo.analysis_format);
+ }
+ format = mAppPreviewFormat;
+ }
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ if ( mPictureFormat == CAM_FORMAT_YUV_422_NV16 ) {
+ format = CAM_FORMAT_YUV_422_NV16;
+ } else {
+ char prop[PROPERTY_VALUE_MAX];
+ int snapshotFormat;
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.snap.format", prop, "0");
+ snapshotFormat = atoi(prop);
+ if(snapshotFormat == 1) {
+ format = CAM_FORMAT_YUV_422_NV61;
+ } else {
+ format = CAM_FORMAT_YUV_420_NV21;
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ if (isUBWCEnabled()) {
+ char prop[PROPERTY_VALUE_MAX];
+ int pFormat;
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.video.ubwc", prop, "1");
+ pFormat = atoi(prop);
+ if (pFormat == 1) {
+ format = CAM_FORMAT_YUV_420_NV12_UBWC;
+ } else {
+ format = CAM_FORMAT_YUV_420_NV21_VENUS;
+ }
+ } else {
+#if VENUS_PRESENT
+ format = CAM_FORMAT_YUV_420_NV21_VENUS;
+#else
+ format = CAM_FORMAT_YUV_420_NV21;
+#endif
+ }
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ if ((isRdiMode()) || (getofflineRAW())|| (getQuadraCfa())) {
+ format = m_pCapability->rdi_mode_stream_fmt;
+ } else if (mPictureFormat >= CAM_FORMAT_YUV_RAW_8BIT_YUYV) {
+ format = (cam_format_t)mPictureFormat;
+ } else {
+ char raw_format[PROPERTY_VALUE_MAX];
+ int rawFormat;
+ memset(raw_format, 0, sizeof(raw_format));
+ /*Default value is CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG*/
+ property_get("persist.camera.raw.format", raw_format, "17");
+ rawFormat = atoi(raw_format);
+ format = (cam_format_t)rawFormat;
+ LOGH("Raw stream format %d bundled with snapshot",
+ format);
+ }
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ if (getQuadraCfa()) {
+ format = m_pCapability->quadra_cfa_format;
+ }
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ case CAM_STREAM_TYPE_DEFAULT:
+ default:
+ break;
+ }
+
+ LOGD("Stream type = %d Stream Format = %d", streamType, format);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getFlipMode
+ *
+ * DESCRIPTION: get flip mode
+ *
+ * PARAMETERS :
+ * @cam_intf_parm_type_t : [input] stream type
+ *
+ * RETURN : int type of flip mode
+ * 0 - no flip
+ * 1 - FLIP_H
+ * 2 - FLIP_V
+ * 3 - FLIP_H | FLIP_V
+ *==========================================================================*/
+int QCameraParameters::getFlipMode(cam_stream_type_t type)
+{
+ const char *str = NULL;
+ int flipMode = 0; // no flip
+
+ switch(type){
+ case CAM_STREAM_TYPE_PREVIEW:
+ if (!isRdiMode()) {
+ str = get(KEY_QC_PREVIEW_FLIP);
+ }
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ str = get(KEY_QC_VIDEO_FLIP);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_POSTVIEW:
+ str = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+ break;
+ default:
+ LOGD("No flip mode for stream type %d", type);
+ break;
+ }
+
+ if(str != NULL){
+ //Need to give the corresponding flip value based on the flip mode string
+ int value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+ if(value != NAME_NOT_FOUND)
+ flipMode = value;
+ }
+
+ LOGH("the filp mode of stream type %d is %d .", type, flipMode);
+ return flipMode;
+}
+
+/*===========================================================================
+ * FUNCTION : isSnapshotFDNeeded
+ *
+ * DESCRIPTION: check whether Face Detection Metadata is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool type of status
+ * true - needed
+ * false - not needed
+ *==========================================================================*/
+bool QCameraParameters::isSnapshotFDNeeded()
+{
+ return getInt(KEY_QC_SNAPSHOT_FD_DATA);
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamDimension
+ *
+ * DESCRIPTION: get stream dimension by its type
+ *
+ * PARAMETERS :
+ * @streamType : [input] stream type
+ * @dim : [output] stream dimension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamDimension(cam_stream_type_t streamType,
+ cam_dimension_t &dim)
+{
+ int32_t ret = NO_ERROR;
+ memset(&dim, 0, sizeof(cam_dimension_t));
+
+ switch (streamType) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ case CAM_STREAM_TYPE_CALLBACK:
+ getPreviewSize(&dim.width, &dim.height);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ getPreviewSize(&dim.width, &dim.height);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ if (isPostProcScaling()) {
+ getMaxPicSize(dim);
+ } else if (getRecordingHintValue()) {
+ // live snapshot
+ getLiveSnapshotSize(dim);
+ } else {
+ getPictureSize(&dim.width, &dim.height);
+ }
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ getVideoSize(&dim.width, &dim.height);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ //dim = m_pCapability->raw_dim;
+ getRawSize(dim);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
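+ // The metadata stream carries one metadata_buffer_t per buffer, so its "dimension" is that struct size by 1.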
+ dim.width = (int32_t)sizeof(metadata_buffer_t);
+ dim.height = 1;
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ if (isPostProcScaling()) {
+ if (getRecordingHintValue()) {
+ // live snapshot
+ getLiveSnapshotSize(dim);
+ } else {
+ getPictureSize(&dim.width, &dim.height);
+ }
+ }
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ cam_dimension_t prv_dim, max_dim;
+
+ /* Analysis stream needs the same aspect ratio as the preview stream */
+ getPreviewSize(&prv_dim.width, &prv_dim.height);
+
+ cam_analysis_info_t analysisInfo;
+ cam_feature_mask_t featureMask;
+
+ featureMask = 0;
+ getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+ ret = getAnalysisInfo(
+ ((getRecordingHintValue() == true) && fdModeInVideo()),
+ FALSE,
+ featureMask,
+ &analysisInfo);
+ if (ret != NO_ERROR) {
+ LOGE("getAnalysisInfo failed, ret = %d", ret);
+ return ret;
+ }
+
+ max_dim.width = analysisInfo.analysis_max_res.width;
+ max_dim.height = analysisInfo.analysis_max_res.height;
+
+ if (prv_dim.width > max_dim.width || prv_dim.height > max_dim.height) {
+ double max_ratio, requested_ratio;
+
+ max_ratio = (double)max_dim.width / (double)max_dim.height;
+ requested_ratio = (double)prv_dim.width / (double)prv_dim.height;
+
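+ // Illustrative example (assumed numbers): with a 1920x1080 preview and a
+ // 1280x720 analysis limit, the 16:9 ratio is kept and the result is
+ // clamped to 1280x720; odd widths/heights are rounded down to even below.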
+ if (max_ratio < requested_ratio) {
+ dim.width = max_dim.width;
+ dim.height = (int32_t)((double)dim.width / requested_ratio);
+ } else {
+ dim.height = max_dim.height;
+ dim.width = (int32_t)((double)max_dim.height * requested_ratio);
+ }
+ dim.width &= ~0x1;
+ dim.height &= ~0x1;
+ } else {
+ dim.width = prv_dim.width;
+ dim.height = prv_dim.height;
+ }
+ break;
+ case CAM_STREAM_TYPE_DEFAULT:
+ default:
+ LOGE("no dimension for unsupported stream type %d",
+ streamType);
+ ret = BAD_VALUE;
+ break;
+ }
+
+ LOGD("Stream type = %d Stream Dimension = %d X %d",
+ streamType, dim.width, dim.height);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getParameters
+ *
+ * DESCRIPTION: Return a C string containing the parameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : a string containing parameter pairs
+ *==========================================================================*/
+char* QCameraParameters::getParameters()
+{
+ char* strParams = NULL;
+ String8 str;
+
+ int cur_width, cur_height;
+ //Handle the scaled picture size when reprocess scaling is active
+ if(m_reprocScaleParam.isScaleEnabled() &&
+ m_reprocScaleParam.isUnderScaling()){
+ int scale_width, scale_height;
+
+ m_reprocScaleParam.getPicSizeFromAPK(scale_width,scale_height);
+ getPictureSize(&cur_width, &cur_height);
+
+ String8 pic_size;
+ char buffer[32];
+ snprintf(buffer, sizeof(buffer), "%dx%d", scale_width, scale_height);
+ pic_size.append(buffer);
+ set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+ }
+
+ str = flatten();
+ strParams = (char *)malloc(sizeof(char)*(str.length()+1));
+ if(strParams != NULL){
+ memset(strParams, 0, sizeof(char)*(str.length()+1));
+ strlcpy(strParams, str.string(), str.length()+1);
+ strParams[str.length()] = 0;
+ }
+
+ if(m_reprocScaleParam.isScaleEnabled() &&
+ m_reprocScaleParam.isUnderScaling()){
+ //restore the original picture size
+ String8 pic_size;
+ char buffer[32];
+ snprintf(buffer, sizeof(buffer), "%dx%d", cur_width, cur_height);
+ pic_size.append(buffer);
+ set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+ }
+ return strParams;
+}
+
+#ifdef TARGET_TS_MAKEUP
+/*===========================================================================
+ * FUNCTION : getTsMakeupInfo
+ *
+ * DESCRIPTION: get TsMakeup info
+ *
+ * PARAMETERS :
+ * @whiteLevel : [output] white level
+ * @cleanLevel : [output] clean level
+ *
+ * RETURN : Whether makeup is enabled or not
+ *==========================================================================*/
+bool QCameraParameters::getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const
+{
+ const char* pch_makeup_enable = get(QCameraParameters::KEY_TS_MAKEUP);
+ if (pch_makeup_enable == NULL) {
+ LOGH("pch_makeup_enable = null");
+ return false;
+ }
+ bool enableMakeup =
+ (strcmp(pch_makeup_enable,"On") == 0);
+ if (enableMakeup) {
+ whiteLevel = getInt(QCameraParameters::KEY_TS_MAKEUP_WHITEN);
+ cleanLevel = getInt(QCameraParameters::KEY_TS_MAKEUP_CLEAN);
+ }
+ return enableMakeup;
+}
+#endif
+
+/*===========================================================================
+ * FUNCTION : getPreviewHalPixelFormat
+ *
+ * DESCRIPTION: get preview HAL pixel format
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : HAL pixel format
+ *==========================================================================*/
+int QCameraParameters::getPreviewHalPixelFormat()
+{
+ int32_t halPixelFormat;
+ cam_format_t fmt;
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+ break;
+ case CAM_FORMAT_YUV_420_NV21:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+ break;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ halPixelFormat = HAL_PIXEL_FORMAT_YV12;
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS;
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_VENUS;
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS_UBWC;
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ default:
+ halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+ break;
+ }
+ LOGH("format %d\n", halPixelFormat);
+ return halPixelFormat;
+}
+
+/*===========================================================================
+ * FUNCTION : getQuadraCFA
+ *
+ * DESCRIPTION: get QuadraCFA mode
+ *
+ * PARAMETERS :
+ *
+ * RETURN : bool
+ *==========================================================================*/
+bool QCameraParameters::getQuadraCfa()
+{
+ return m_bQuadraCfa;
+}
+
+/*===========================================================================
+ * FUNCTION : getThumbnailSize
+ *
+ * DESCRIPTION: get thumbnail size
+ *
+ * PARAMETERS :
+ * @width, height : [output] thumbnail width and height
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::getThumbnailSize(int *width, int *height) const
+{
+ *width = getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+ *height = getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+}
+
+/*===========================================================================
+ * FUNCTION : getZSLBurstInterval
+ *
+ * DESCRIPTION: get ZSL burst interval setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : ZSL burst interval value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBurstInterval()
+{
+ int interval = getInt(KEY_QC_ZSL_BURST_INTERVAL);
+ if (interval < 0) {
+ interval = 1;
+ }
+ return (uint8_t)interval;
+}
+
+/*===========================================================================
+ * FUNCTION : getZSLQueueDepth
+ *
+ * DESCRIPTION: get ZSL queue depth
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : ZSL queue depth value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLQueueDepth()
+{
+ int qdepth = getInt(KEY_QC_ZSL_QUEUE_DEPTH);
+ if (qdepth < 0) {
+ qdepth = 2;
+ }
+ if (isLowMemoryDevice()) {
+ qdepth = 1;
+ }
+ return (uint8_t)qdepth;
+}
+
+/*===========================================================================
+ * FUNCTION : getZSLBackLookCount
+ *
+ * DESCRIPTION: get ZSL backlook count setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : ZSL backlook count value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBackLookCount()
+{
+ int look_back = getInt(KEY_QC_ZSL_BURST_LOOKBACK);
+ if (look_back < 0) {
+ look_back = 2;
+ }
+ if (isLowMemoryDevice()) {
+ look_back = 1;
+ }
+ return (uint8_t)look_back;
+}
+
+/*===========================================================================
+ * FUNCTION : isVideoBuffersCached
+ *
+ * DESCRIPTION: query whether video buffers are cached or uncached
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : true if video buffers are cached, false otherwise
+ *==========================================================================*/
+bool QCameraParameters::isVideoBuffersCached()
+{
+ const char *cached_mem = get(KEY_QC_CACHE_VIDEO_BUFFERS);
+ if (cached_mem != NULL) {
+ if (strcmp(cached_mem, VALUE_DISABLE) != 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : getMaxUnmatchedFramesInQueue
+ *
+ * DESCRIPTION: get the maximum number of unmatched ZSL frames allowed in the queue
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : maximum number of unmatched frames allowed in the ZSL queue
+ *==========================================================================*/
+uint8_t QCameraParameters::getMaxUnmatchedFramesInQueue()
+{
+ return (uint8_t)(m_pCapability->min_num_pp_bufs);
+}
+
+/*===========================================================================
+ * FUNCTION : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint
+ *
+ * PARAMETERS :
+ * @value : video hint value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::setRecordingHintValue(int32_t value)
+{
+ LOGH("VideoHint = %d", value);
+ bool newValue = (value > 0)? true : false;
+
+ if ( m_bRecordingHint != newValue ) {
+ m_bNeedRestart = true;
+ m_bRecordingHint_new = newValue;
+ } else {
+ m_bRecordingHint_new = m_bRecordingHint;
+ }
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RECORDING_HINT, value)) {
+ return BAD_VALUE;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfSnapshots
+ *
+ * DESCRIPTION: get number of snapshot per shutter
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of snapshot per shutter
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfSnapshots()
+{
+ uint8_t numOfSnapshot = 1;
+ int val = getInt(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER);
+ if (0 < val) {
+ numOfSnapshot = (uint8_t)val;
+ }
+
+ return (uint8_t)numOfSnapshot;
+}
+
+/*===========================================================================
+ * FUNCTION : getBurstCountForAdvancedCapture
+ *
+ * DESCRIPTION: get burst count for advanced capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of snapshot required for advanced capture.
+ *==========================================================================*/
+uint8_t QCameraParameters::getBurstCountForAdvancedCapture()
+{
+ uint32_t burstCount = 0;
+ if (isUbiFocusEnabled()) {
+ //number of snapshots required for Ubi Focus.
+ burstCount = m_pCapability->ubifocus_af_bracketing_need.burst_count;
+ } else if (isUbiRefocus()) {
+ //number of snapshots required for Ubi Refocus.
+ burstCount = m_pCapability->refocus_af_bracketing_need.burst_count;
+ } else if (isOptiZoomEnabled()) {
+ //number of snapshots required for Opti Zoom.
+ burstCount = m_pCapability->opti_zoom_settings_need.burst_count;
+ } else if (isChromaFlashEnabled()) {
+ //number of snapshots required for Chroma Flash.
+ burstCount = m_pCapability->chroma_flash_settings_need.burst_count;
+ } else if (isStillMoreEnabled()) {
+ //number of snapshots required for Still More.
+ if (isSeeMoreEnabled()) {
+ burstCount = 1;
+ } else if ((m_stillmore_config.burst_count >=
+ m_pCapability->stillmore_settings_need.min_burst_count) &&
+ (m_stillmore_config.burst_count <=
+ m_pCapability->stillmore_settings_need.max_burst_count)) {
+ burstCount = m_stillmore_config.burst_count;
+ } else {
+ burstCount = m_pCapability->stillmore_settings_need.burst_count;
+ }
+ } else if (isHDREnabled()) {
+ //number of snapshots required for HDR.
+ burstCount = m_pCapability->hdr_bracketing_setting.num_frames;
+ } else if (isAEBracketEnabled()) {
+ burstCount = 0;
+ const char *str_val = m_AEBracketingClient.values;
+ if ((str_val != NULL) && (strlen(str_val) > 0)) {
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+ char *saveptr = NULL;
+ char *token = strtok_r(prop, ",", &saveptr);
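+ // Counting example (illustrative): an exposure value list such as
+ // "0,-6,6" yields a burst count of 3, one frame per bracketed value.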
+ while (token != NULL) {
+ token = strtok_r(NULL, ",", &saveptr);
+ burstCount++;
+ }
+ }
+ }
+
+ if (burstCount <= 0) {
+ burstCount = getNumOfSnapshots();
+ }
+
+ LOGH("Snapshot burst count = %d", burstCount);
+ return (uint8_t)burstCount;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfRetroSnapshots
+ *
+ * DESCRIPTION: get number of retroactive snapshots per shutter
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of retroactive snapshots per shutter
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfRetroSnapshots()
+{
+ int numOfRetroSnapshots = getInt(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER);
+ if (numOfRetroSnapshots < 0) {
+ numOfRetroSnapshots = 0;
+ }
+ LOGH("numOfRetroSnaps - %d", numOfRetroSnapshots);
+ return (uint8_t)numOfRetroSnapshots;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfExtraHDRInBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra input buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDRInBufsIfNeeded()
+{
+ unsigned int numOfBufs = 0;
+
+ if (isHDREnabled()) {
+ numOfBufs += m_pCapability->hdr_bracketing_setting.num_frames;
+ if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+ numOfBufs++;
+ }
+ numOfBufs--; // Only additional buffers need to be returned
+ }
+
+ return (uint8_t)(numOfBufs);
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfExtraHDROutBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra output buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDROutBufsIfNeeded()
+{
+ int numOfBufs = 0;
+
+ if (isHDREnabled() && isHDR1xFrameEnabled()) {
+ numOfBufs++;
+ }
+
+ return (uint8_t)(numOfBufs);
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegQuality
+ *
+ * DESCRIPTION: get jpeg encoding quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : jpeg encoding quality
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegQuality()
+{
+ int quality = getInt(KEY_JPEG_QUALITY);
+ if (quality < 0) {
+ quality = 85; // set to default quality value
+ }
+ return (uint32_t)quality;
+}
+
+/*===========================================================================
+ * FUNCTION : getRotation
+ *
+ * DESCRIPTION: get application configured rotation
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : rotation value
+ *==========================================================================*/
+uint32_t QCameraParameters::getRotation() {
+ int rotation = 0;
+
+ //If exif rotation is set, do not rotate captured image
+ if (!useJpegExifRotation()) {
+ rotation = mRotation;
+ if (rotation < 0) {
+ rotation = 0;
+ }
+ }
+ return (uint32_t)rotation;
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegRotation
+ *
+ * DESCRIPTION: set jpeg rotation value configured internally
+ *
+ * PARAMETERS :
+ * @rotation : jpeg rotation value (0, 90, 180 or 270)
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::setJpegRotation(int rotation) {
+ if (rotation == 0 || rotation == 90 ||
+ rotation == 180 || rotation == 270) {
+ mJpegRotation = (uint32_t)rotation;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getDeviceRotation
+ *
+ * DESCRIPTION: get device rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : device rotation value
+ *==========================================================================*/
+uint32_t QCameraParameters::getDeviceRotation() {
+ int rotation = 0;
+
+ rotation = mRotation;
+ if (rotation < 0) {
+ rotation = 0;
+ }
+
+ return (uint32_t)rotation;
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegExifRotation
+ *
+ * DESCRIPTION: get exif rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : rotation value
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegExifRotation() {
+ int rotation = 0;
+
+ if (useJpegExifRotation()) {
+ rotation = mRotation;
+ if (rotation < 0) {
+ rotation = 0;
+ }
+ }
+ return (uint32_t)rotation;
+}
+
+/*===========================================================================
+ * FUNCTION : useJpegExifRotation
+ *
+ * DESCRIPTION: Check if jpeg exif rotation needs to be used
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true if jpeg exif rotation needs to be used
+ *==========================================================================*/
+bool QCameraParameters::useJpegExifRotation() {
+ char exifRotation[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.exif.rotation", exifRotation, "off");
+
+ if (!strcmp(exifRotation, "on")) {
+ return true;
+ }
+
+ if (!(m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
+ return true;
+ }
+
+ return false;
+}
+
+/*===========================================================================
+ * FUNCTION : getEffectValue
+ *
+ * DESCRIPTION: get effect value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : effect value
+ *==========================================================================*/
+int32_t QCameraParameters::getEffectValue()
+{
+ uint32_t cnt = 0;
+ const char *effect = get(KEY_EFFECT);
+ if (effect) {
+ while (NULL != EFFECT_MODES_MAP[cnt].desc) {
+ if (!strcmp(EFFECT_MODES_MAP[cnt].desc, effect)) {
+ return EFFECT_MODES_MAP[cnt].val;
+ }
+ cnt++;
+ }
+ } else {
+ LOGW("Missing effect value");
+ }
+ return CAM_EFFECT_MODE_OFF;
+}
+
+/*===========================================================================
+ * FUNCTION : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse GPS coordinate string
+ *
+ * PARAMETERS :
+ * @coord_str : [input] coordinate string
+ * @coord : [output] ptr to struct to store coordinate
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+ if(coord == NULL) {
+ LOGE("error, invalid argument coord == NULL");
+ return BAD_VALUE;
+ }
+ double degF = atof(coord_str);
+ if (degF < 0) {
+ degF = -degF;
+ }
+ double minF = (degF - (double)(int) degF) * 60.0;
+ double secF = (minF - (double)(int) minF) * 60.0;
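+ // Worked example (illustrative): "37.7749" splits into 37 degrees,
+ // 46 minutes and roughly 29.64 seconds; the seconds are stored below
+ // as a rational with a denominator of 10000.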
+
+ getRational(&coord[0], (int)degF, 1);
+ getRational(&coord[1], (int)minF, 1);
+ getRational(&coord[2], (int)(secF * 10000.0), 10000);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getExifDateTime
+ *
+ * DESCRIPTION: query exif date time
+ *
+ * PARAMETERS :
+ * @dateTime : String to store exif date time.
+ * Should be left unchanged in case of error.
+ * @subsecTime : String to store the exif sub-second time (microseconds).
+ * Should be left unchanged in case of error.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifDateTime(String8 &dateTime, String8 &subsecTime)
+{
+ int32_t ret = NO_ERROR;
+
+ //get time and date from system
+ struct timeval tv;
+ struct tm timeinfo_data;
+
+ int res = gettimeofday(&tv, NULL);
+ if (0 == res) {
+ struct tm *timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
+ if (NULL != timeinfo) {
+ //Write datetime according to EXIF Spec
+ //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+ dateTime = String8::format("%04d:%02d:%02d %02d:%02d:%02d",
+ timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+ timeinfo->tm_mday, timeinfo->tm_hour,
+ timeinfo->tm_min, timeinfo->tm_sec);
+ //Write subsec according to EXIF Spec
+ subsecTime = String8::format("%06ld", tv.tv_usec);
+ } else {
+ LOGE("localtime_r() error");
+ ret = UNKNOWN_ERROR;
+ }
+ } else if (-1 == res) {
+ LOGE("gettimeofday() error: %s", strerror(errno));
+ ret = UNKNOWN_ERROR;
+ } else {
+ LOGE("gettimeofday() unexpected return code: %d", res);
+ ret = UNKNOWN_ERROR;
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getRational
+ *
+ * DESCRIPTION: compose rational struct
+ *
+ * PARAMETERS :
+ * @rat : ptr to struct to store rational info
+ * @num : numerator of the rational
+ * @denom : denominator of the rational
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRational(rat_t *rat, int num, int denom)
+{
+ if ((0 > num) || (0 > denom)) {
+ LOGE("Negative values");
+ return BAD_VALUE;
+ }
+ if (NULL == rat) {
+ LOGE("NULL rat input");
+ return BAD_VALUE;
+ }
+ rat->num = (uint32_t)num;
+ rat->denom = (uint32_t)denom;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getExifFocalLength
+ *
+ * DESCRIPTION: get exif focal length
+ *
+ * PARAMETERS :
+ * @focalLength : ptr to rational struct to store focal length
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifFocalLength(rat_t *focalLength)
+{
+ int focalLengthValue =
+ (int)(getFloat(QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+ return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION : getExifIsoSpeed
+ *
+ * DESCRIPTION: get exif ISO speed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : ISO speed value
+ *==========================================================================*/
+uint16_t QCameraParameters::getExifIsoSpeed()
+{
+ uint16_t isoSpeed = 0;
+ const char *iso_str = get(QCameraParameters::KEY_QC_ISO_MODE);
+ int iso_index = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), iso_str);
+ switch (iso_index) {
+ case CAM_ISO_MODE_AUTO:
+ isoSpeed = 0;
+ break;
+ case CAM_ISO_MODE_DEBLUR:
+ isoSpeed = 1;
+ break;
+ case CAM_ISO_MODE_100:
+ isoSpeed = 100;
+ break;
+ case CAM_ISO_MODE_200:
+ isoSpeed = 200;
+ break;
+ case CAM_ISO_MODE_400:
+ isoSpeed = 400;
+ break;
+ case CAM_ISO_MODE_800:
+ isoSpeed = 800;
+ break;
+ case CAM_ISO_MODE_1600:
+ isoSpeed = 1600;
+ break;
+ case CAM_ISO_MODE_3200:
+ isoSpeed = 3200;
+ break;
+ }
+ return isoSpeed;
+}
+
+/*===========================================================================
+ * FUNCTION : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method
+ *
+ * PARAMETERS :
+ * @gpsProcessingMethod : string to store GPS process method
+ * @count : length of the string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsProcessingMethod(char *gpsProcessingMethod,
+ uint32_t &count)
+{
+ const char *str = get(KEY_GPS_PROCESSING_METHOD);
+ if(str != NULL) {
+ memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+ count = EXIF_ASCII_PREFIX_SIZE;
+ strlcpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str, GPS_PROCESSING_METHOD_SIZE);
+ count += (uint32_t)strlen(str);
+ gpsProcessingMethod[count++] = '\0'; // account for the trailing NUL character
+ return NO_ERROR;
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ * @latitude : ptr to rational struct to store latitude info
+ * @latRef : character to indicate latitude reference
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLatitude(rat_t *latitude,
+ char *latRef)
+{
+ const char *str = get(KEY_GPS_LATITUDE);
+ if(str != NULL) {
+ parseGPSCoordinate(str, latitude);
+
+ //set Latitude Ref
+ float latitudeValue = getFloat(KEY_GPS_LATITUDE);
+ if(latitudeValue < 0.0f) {
+ latRef[0] = 'S';
+ } else {
+ latRef[0] = 'N';
+ }
+ latRef[1] = '\0';
+ return NO_ERROR;
+ }else{
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ * @longitude : ptr to rational struct to store longitude info
+ * @lonRef : character to indicate longitude reference
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLongitude(rat_t *longitude,
+ char *lonRef)
+{
+ const char *str = get(KEY_GPS_LONGITUDE);
+ if(str != NULL) {
+ parseGPSCoordinate(str, longitude);
+
+ //set Longitude Ref
+ float longitudeValue = getFloat(KEY_GPS_LONGITUDE);
+ if(longitudeValue < 0.0f) {
+ lonRef[0] = 'W';
+ } else {
+ lonRef[0] = 'E';
+ }
+ lonRef[1] = '\0';
+ return NO_ERROR;
+ }else{
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ * @altitude : ptr to rational struct to store altitude info
+ * @altRef : character to indicate altitude reference
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifAltitude(rat_t *altitude,
+ char *altRef)
+{
+ const char *str = get(KEY_GPS_ALTITUDE);
+ if(str != NULL) {
+ double value = atof(str);
+ *altRef = 0;
+ if(value < 0){
+ *altRef = 1;
+ value = -value;
+ }
+ return getRational(altitude, (int)(value*1000), 1000);
+ }else{
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ * @gpsDateStamp : GPS date time stamp string
+ * @bufLen : length of the string
+ * @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsDateTimeStamp(char *gpsDateStamp,
+ uint32_t bufLen,
+ rat_t *gpsTimeStamp)
+{
+ const char *str = get(KEY_GPS_TIMESTAMP);
+ if(str != NULL) {
+ time_t unixTime = (time_t)atol(str);
+ struct tm *UTCTimestamp = gmtime(&unixTime);
+
+ if(!UTCTimestamp) {
+ LOGE("UTCTimestamp is null\n");
+ return BAD_VALUE;
+ }
+
+ strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+ getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+ getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+ getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
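+ // Example (illustrative): a timestamp of "0" gives the date string
+ // "1970:01:01" and time rationals of 0/1 hours, minutes and seconds (UTC).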
+
+ return NO_ERROR;
+ } else {
+ return BAD_VALUE;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : updateFocusDistances
+ *
+ * DESCRIPTION: update focus distances
+ *
+ * PARAMETERS :
+ * @focusDistances : ptr to focus distance info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+ String8 str;
+ char buffer[32] = {0};
+ //set all distances to infinity if focus mode is infinity
+ if(mFocusMode == CAM_FOCUS_MODE_INFINITY) {
+ str.append("Infinity,Infinity,Infinity");
+ } else {
+ if (focusDistances->focus_distance[0] < FOCUS_PERCISION) {
+ str.append("Infinity");
+ } else {
+ snprintf(buffer, sizeof(buffer), "%f", 1.0/focusDistances->focus_distance[0]);
+ str.append(buffer);
+ }
+ if (focusDistances->focus_distance[1] < FOCUS_PERCISION) {
+ str.append(",Infinity");
+ } else {
+ snprintf(buffer, sizeof(buffer), ",%f", 1.0/focusDistances->focus_distance[1]);
+ str.append(buffer);
+ }
+ if (focusDistances->focus_distance[2] < FOCUS_PERCISION) {
+ str.append(",Infinity");
+ } else {
+ snprintf(buffer, sizeof(buffer), ",%f", 1.0/focusDistances->focus_distance[2]);
+ str.append(buffer);
+ }
+ }
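+ // The resulting value follows the "near,optimal,far" convention of
+ // KEY_FOCUS_DISTANCES, e.g. "0.95,1.90,Infinity" (illustrative values in meters).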
+ LOGH("setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+ set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : updateRecordingHintValue
+ *
+ * DESCRIPTION: update recording hint locally and to daemon
+ *
+ * PARAMETERS :
+ * @value : video hint value
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRecordingHintValue(int32_t value)
+{
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ rc = setRecordingHintValue(value);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to update table");
+ return rc;
+ }
+
+ if(m_bDISEnabled && (value==1)) {
+ LOGH("%d: Setting DIS value again!!");
+ setDISValue(VALUE_ENABLE);
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to update recording hint");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setHistogram
+ *
+ * DESCRIPTION: set histogram
+ *
+ * PARAMETERS :
+ * @enabled : if histogram is enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHistogram(bool enabled)
+{
+ if(m_bHistogramEnabled == enabled) {
+ LOGH("histogram flag not changed, no ops here");
+ return NO_ERROR;
+ }
+
+ // set parm for histogram
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ int32_t value = enabled ? 1 : 0;
+ int32_t rc = NO_ERROR;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HISTOGRAM, value)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set histogram");
+ return rc;
+ }
+
+ m_bHistogramEnabled = enabled;
+
+ LOGH("Histogram -> %s", m_bHistogramEnabled ? "Enabled" : "Disabled");
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setIntEvent
+ *
+ * DESCRIPTION: set internal event parameters
+ *
+ * PARAMETERS :
+ * @params : image size and dimensions
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setIntEvent(cam_int_evt_params_t params)
+{
+ int32_t rc = NO_ERROR;
+
+ if ( m_pParamBuf == NULL ) {
+ return NO_INIT;
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ //Sending snapshot taken notification back to Eztune
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_INT_EVT, params)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set frameskip info parm");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setFaceDetectionOption
+ *
+ * DESCRIPTION: set if face detection is enabled by SendCommand
+ *
+ * PARAMETERS :
+ * @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceDetectionOption(bool enabled)
+{
+ m_bFaceDetectionOn = enabled;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setFaceDetection
+ *
+ * DESCRIPTION: set face detection
+ *
+ * PARAMETERS :
+ * @enabled : if face detection is enabled
+ * @initCommit : if configuration list needs to be initialized and committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceDetection(bool enabled, bool initCommit)
+{
+ uint32_t faceProcMask = m_nFaceProcMask;
+ // set face detection mask
+ if (enabled) {
+ if (m_pCapability->max_num_roi == 0) {
+ LOGE("Face detection is not support becuase max number of face is 0");
+ return BAD_VALUE;
+ }
+ faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+ if (getRecordingHintValue() > 0) {
+ faceProcMask = 0;
+ faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
+ if (fdModeInVideo() == CAM_FACE_PROCESS_MASK_DETECTION) {
+ faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+ }
+ } else {
+ faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
+ faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+ }
+ if (isTruePortraitEnabled()) {
+ LOGL("QCameraParameters::setFaceDetection trueportrait enabled");
+ faceProcMask |= CAM_FACE_PROCESS_MASK_GAZE;
+ } else {
+ LOGL("QCameraParameters::setFaceDetection trueportrait disabled");
+ faceProcMask &= ~CAM_FACE_PROCESS_MASK_GAZE;
+ }
+ } else {
+ faceProcMask &= ~(CAM_FACE_PROCESS_MASK_DETECTION
+ | CAM_FACE_PROCESS_MASK_FOCUS
+ | CAM_FACE_PROCESS_MASK_GAZE);
+ }
+
+ if(m_nFaceProcMask == faceProcMask) {
+ LOGH("face process mask not changed, no ops here");
+ return NO_ERROR;
+ }
+
+ m_nFaceProcMask = faceProcMask;
+
+ // set parm for face detection
+ uint32_t requested_faces = (uint32_t)getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+ cam_fd_set_parm_t fd_set_parm;
+ memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+ fd_set_parm.fd_mode = faceProcMask;
+ fd_set_parm.num_fd = requested_faces;
+
+ LOGH("[KPI Perf]: PROFILE_FACE_DETECTION_VALUE = %d num_fd = %d",
+ faceProcMask,requested_faces);
+
+ if (initCommit) {
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+ }
+
+ int32_t rc = NO_ERROR;
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FD, fd_set_parm)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ if (initCommit) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set face detection parm");
+ return rc;
+ }
+ }
+
+ LOGH("FaceProcMask -> %d", m_nFaceProcMask);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setFrameSkip
+ *
+ * DESCRIPTION: send ISP frame skip pattern to camera daemon
+ *
+ * PARAMETERS :
+ * @pattern : skip pattern for ISP
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+ int32_t rc = NO_ERROR;
+
+ if ( m_pParamBuf == NULL ) {
+ return NO_INIT;
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FRAMESKIP, (int32_t)pattern)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set frameskip info parm");
+ return rc;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : updateRAW
+ *
+ * DESCRIPTION: Query sensor output size based on maximum stream dimension
+ *
+ * PARAMETERS :
+ * @max_dim : maximum stream dimension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRAW(cam_dimension_t max_dim)
+{
+ int32_t rc = NO_ERROR;
+ cam_dimension_t raw_dim, pic_dim;
+
+ // If offline RAW is enabled, check the dimensions from the picture size, since the
+ // snapshot stream is not added but the final JPEG is still required at snapshot size
+ if (getofflineRAW()) {
+ if (getQuadraCfa()) {
+ max_dim.width = m_pCapability->quadra_cfa_dim[0].width;
+ max_dim.height = m_pCapability->quadra_cfa_dim[0].height;
+ } else {
+ getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, pic_dim);
+ if (pic_dim.width > max_dim.width) {
+ max_dim.width = pic_dim.width;
+ }
+ if (pic_dim.height > max_dim.height) {
+ max_dim.height = pic_dim.height;
+ }
+ }
+ }
+
+ if (max_dim.width == 0 || max_dim.height == 0) {
+ max_dim = m_pCapability->raw_dim[0];
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MAX_DIMENSION, max_dim)) {
+ LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION ");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set lock CAM_INTF_PARM_MAX_DIMENSION parm");
+ return rc;
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RAW_DIMENSION);
+
+ rc = commitGetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to get commit CAM_INTF_PARM_RAW_DIMENSION");
+ return rc;
+ }
+
+ READ_PARAM_ENTRY(m_pParamBuf, CAM_INTF_PARM_RAW_DIMENSION, raw_dim);
+
+ LOGH("RAW Dimension = %d X %d",raw_dim.width,raw_dim.height);
+ if (raw_dim.width == 0 || raw_dim.height == 0) {
+ LOGW("Error getting RAW size. Setting to Capability value");
+ if (getQuadraCfa()) {
+ raw_dim = m_pCapability->quadra_cfa_dim[0];
+ } else {
+ raw_dim = m_pCapability->raw_dim[0];
+ }
+ }
+ setRawSize(raw_dim);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setHDRSceneEnable
+ *
+ * DESCRIPTION: sets hdr scene detected flag
+ *
+ * PARAMETERS :
+ * @bflag : hdr scene detected
+ *
+ * RETURN : nothing
+ *==========================================================================*/
+void QCameraParameters::setHDRSceneEnable(bool bflag)
+{
+ bool bupdate = false;
+ if (m_HDRSceneEnabled != bflag) {
+ bupdate = true;
+ }
+ m_HDRSceneEnabled = bflag;
+
+ if (bupdate) {
+ updateFlash(true);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getASDStateString
+ *
+ * DESCRIPTION: get ASD result in string format
+ *
+ * PARAMETERS :
+ * @scene : selected scene mode
+ *
+ * RETURN : ASD scene name string
+ *==========================================================================*/
+const char *QCameraParameters::getASDStateString(cam_auto_scene_t scene)
+{
+ switch (scene) {
+ case S_NORMAL :
+ return "Normal";
+ case S_SCENERY:
+ return "Scenery";
+ case S_PORTRAIT:
+ return "Portrait";
+ case S_PORTRAIT_BACKLIGHT:
+ return "Portrait-Backlight";
+ case S_SCENERY_BACKLIGHT:
+ return "Scenery-Backlight";
+ case S_BACKLIGHT:
+ return "Backlight";
+ default:
+ return "<Unknown!>";
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : parseNDimVector
+ *
+ * DESCRIPTION: helper function to parse a string like "(1, 2, 3, 4, ..., N)"
+ * into N-dimension vector
+ *
+ * PARAMETERS :
+ * @str : string to be parsed
+ * @num : output array of size N to store vector element values
+ * @N : number of dimensions
+ * @delim : delimiter used to separate values in the string
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+ char *start, *end;
+ if (num == NULL) {
+ LOGE("Invalid output array (num == NULL)");
+ return BAD_VALUE;
+ }
+
+ //check if string starts and ends with parentheses
+ if(str[0] != '(' || str[strlen(str)-1] != ')') {
+ LOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)",
+ str);
+ return BAD_VALUE;
+ }
+ start = (char*) str;
+ start++;
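+ // Example (illustrative): parsing "(1, 2, 3, 4)" with N = 4 fills
+ // num[] with {1, 2, 3, 4}; strtol() skips the spaces after each comma.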
+ for(int i=0; i<N; i++) {
+ *(num+i) = (int) strtol(start, &end, 10);
+ if(*end != delim && i < N-1) {
+ LOGE("Cannot find delimeter '%c' in string \"%s\". end = %c",
+ delim, str, *end);
+ return -1;
+ }
+ start = end+1;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : parseCameraAreaString
+ *
+ * DESCRIPTION: helper function to parse a string of camera areas like
+ * "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+ *
+ * PARAMETERS :
+ * @str : string to be parsed
+ * @max_num_areas : max number of areas
+ * @pAreas : ptr to struct to store areas
+ * @num_areas_found : number of areas found
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseCameraAreaString(const char *str,
+ int max_num_areas,
+ cam_area_t *pAreas,
+ int& num_areas_found)
+{
+ char area_str[32];
+ const char *start, *end, *p;
+ start = str; end = NULL;
+ int values[5], index=0;
+ num_areas_found = 0;
+
+ memset(values, 0, sizeof(values));
+ while(start != NULL) {
+ if(*start != '(') {
+ LOGE("error: Ill formatted area string: %s", str);
+ return BAD_VALUE;
+ }
+ end = strchr(start, ')');
+ if(end == NULL) {
+ LOGE("error: Ill formatted area string: %s", str);
+ return BAD_VALUE;
+ }
+ int i;
+ for (i=0,p=start; p<=end; p++, i++) {
+ area_str[i] = *p;
+ }
+ area_str[i] = '\0';
+ if(parseNDimVector(area_str, values, 5) < 0){
+ LOGE("error: Failed to parse the area string: %s", area_str);
+ return BAD_VALUE;
+ }
+ // no more areas than max_num_areas are accepted.
+ if(index >= max_num_areas) {
+ LOGE("error: too many areas specified %s", str);
+ return BAD_VALUE;
+ }
+ pAreas[index].rect.left = values[0];
+ pAreas[index].rect.top = values[1];
+ pAreas[index].rect.width = values[2] - values[0];
+ pAreas[index].rect.height = values[3] - values[1];
+ pAreas[index].weight = values[4];
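+ // Example (illustrative): "(-200,-200,200,200,500)" yields left=-200,
+ // top=-200, width=400, height=400, weight=500.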
+
+ index++;
+ start = strchr(end, '('); // search for next '('
+ }
+ num_areas_found = index;
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : validateCameraAreas
+ *
+ * DESCRIPTION: helper function to validate camera areas within (-1000, 1000)
+ *
+ * PARAMETERS :
+ * @areas : ptr to array of areas
+ * @num_areas : number of areas
+ *
+ * RETURN : true -- area is in valid range
+ * false -- not valid
+ *==========================================================================*/
+bool QCameraParameters::validateCameraAreas(cam_area_t *areas, int num_areas)
+{
+ // special case: default area
+ if (num_areas == 1 &&
+ areas[0].rect.left == 0 &&
+ areas[0].rect.top == 0 &&
+ areas[0].rect.width == 0 &&
+ areas[0].rect.height == 0 &&
+ areas[0].weight == 0) {
+ return true;
+ }
+
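+ // Example (illustrative): a full-frame area with left=-1000, top=-1000,
+ // width=2000, height=2000 and weight=1 passes every check below.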
+ for(int i = 0; i < num_areas; i++) {
+ // left should be >= -1000
+ if(areas[i].rect.left < -1000) {
+ return false;
+ }
+
+ // top should be >= -1000
+ if(areas[i].rect.top < -1000) {
+ return false;
+ }
+
+ // width or height should be > 0
+ if (areas[i].rect.width <= 0 || areas[i].rect.height <= 0) {
+ return false;
+ }
+
+ // right should be <= 1000
+ if(areas[i].rect.left + areas[i].rect.width > 1000) {
+ return false;
+ }
+
+ // bottom should be <= 1000
+ if(areas[i].rect.top + areas[i].rect.height > 1000) {
+ return false;
+ }
+
+ // weight should be within [1, 1000]
+ if (areas[i].weight < 1 || areas[i].weight > 1000) {
+ return false;
+ }
+ }
+ return true;
+}
+
+/*===========================================================================
+ * FUNCTION : isYUVFrameInfoNeeded
+ *
+ * DESCRIPTION: In AE-Bracket mode, YUV buffer information needs to be set for the upper layer
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: not needed
+ *==========================================================================*/
+bool QCameraParameters::isYUVFrameInfoNeeded()
+{
+ //In AE-Bracket mode, we need to set raw buffer information for the upper layer
+ if(!isNV21PictureFormat() && !isNV16PictureFormat()){
+ return false;
+ }
+ const char *aecBracketStr = get(KEY_QC_AE_BRACKET_HDR);
+
+ int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+ aecBracketStr);
+ LOGH("aecBracketStr=%s, value=%d.", aecBracketStr, value);
+ return (value == CAM_EXP_BRACKETING_ON);
+}
+
+/*===========================================================================
+ * FUNCTION : getFrameFmtString
+ *
+ * DESCRIPTION: get string name of frame format
+ *
+ * PARAMETERS :
+ * @fmt : frame format
+ *
+ * RETURN : string name of frame format
+ *==========================================================================*/
+const char *QCameraParameters::getFrameFmtString(cam_format_t fmt)
+{
+ return lookupNameByValue(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), fmt);
+}
+
+/*===========================================================================
+ * FUNCTION : setDcrf
+ *
+ * DESCRIPTION: Enable/Disable DCRF (dual-camera-range-finding)
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::setDcrf()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+
+ // Set DCRF to off by default (assuming single-camera mode)
+ m_bDcrfEnabled = 0;
+
+ // In dual-cam mode, get sysprop and set it to on by default
+ if(m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ property_get("persist.camera.dcrf.enable", prop, "1");
+ m_bDcrfEnabled = atoi(prop);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setRelatedCamSyncInfo
+ *
+ * DESCRIPTION: set the related cam info parameters
+ * the related cam info is cached into params to make some decisions beforehand
+ *
+ * PARAMETERS :
+ * @info : ptr to related cam info parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info)
+{
+ if(info != NULL){
+ memcpy(&m_relCamSyncInfo, info,
+ sizeof(cam_sync_related_sensors_event_info_t));
+ return NO_ERROR;
+ } else {
+ LOGE("info buffer is null");
+ return UNKNOWN_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getRelatedCamSyncInfo
+ *
+ * DESCRIPTION: returns the related cam sync info for this HWI instance
+ *
+ * PARAMETERS :none
+ *
+ * RETURN : const pointer to cam_sync_related_sensors_event_info_t
+ *==========================================================================*/
+const cam_sync_related_sensors_event_info_t*
+ QCameraParameters::getRelatedCamSyncInfo(void)
+{
+ return &m_relCamSyncInfo;
+}
+
+/*===========================================================================
+ * FUNCTION : setFrameSyncEnabled
+ *
+ * DESCRIPTION: sets whether frame sync is enabled
+ *
+ * PARAMETERS :
+ * @enable : flag whether to enable or disable frame sync
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSyncEnabled(bool enable)
+{
+ m_bFrameSyncEnabled = enable;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : isFrameSyncEnabled
+ *
+ * DESCRIPTION: returns whether frame sync is enabled
+ *
+ * PARAMETERS :none
+ *
+ * RETURN : bool indicating whether frame sync is enabled
+ *==========================================================================*/
+bool QCameraParameters::isFrameSyncEnabled(void)
+{
+ return m_bFrameSyncEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION : bundleRelatedCameras
+ *
+ * DESCRIPTION: send trigger for bundling related camera sessions in the server
+ *
+ * PARAMETERS :
+ * @sync : indicates whether syncing is On or Off
+ * @sessionid : session id for the other camera session
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::bundleRelatedCameras(bool sync,
+ uint32_t sessionid)
+{
+ int32_t rc = NO_ERROR;
+
+ if (NULL == m_pCamOpsTbl) {
+ LOGE("Ops not initialized");
+ return NO_INIT;
+ }
+
+ LOGD("Sending Bundling cmd sync %d, SessionId %d ",
+ sync, sessionid);
+
+ if(m_pRelCamSyncBuf) {
+ if(sync) {
+ m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+ }
+ else {
+ m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+ }
+ m_pRelCamSyncBuf->mode = m_relCamSyncInfo.mode;
+ m_pRelCamSyncBuf->type = m_relCamSyncInfo.type;
+ m_pRelCamSyncBuf->related_sensor_session_id = sessionid;
+ rc = m_pCamOpsTbl->ops->sync_related_sensors(
+ m_pCamOpsTbl->camera_handle, m_pRelCamSyncBuf);
+ } else {
+ LOGE("Related Cam SyncBuffer not allocated", rc);
+ return NO_INIT;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getRelatedCamCalibration
+ *
+ * DESCRIPTION: fetch the related camera subsystem calibration data
+ *
+ * PARAMETERS :
+ * @calib : calibration data fetched
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRelatedCamCalibration(
+ cam_related_system_calibration_data_t* calib)
+{
+ int32_t rc = NO_ERROR;
+
+ if(!calib) {
+ return BAD_TYPE;
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION);
+
+ rc = commitGetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to get related cam calibration info");
+ return rc;
+ }
+
+ READ_PARAM_ENTRY(m_pParamBuf,
+ CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION, *calib);
+
+ LOGD("CALIB version %d ", calib->calibration_format_version);
+ LOGD("CALIB normalized_focal_length %f ",
+ calib->main_cam_specific_calibration.normalized_focal_length);
+ LOGD("CALIB native_sensor_resolution_width %d ",
+ calib->main_cam_specific_calibration.native_sensor_resolution_width);
+ LOGD("CALIB native_sensor_resolution_height %d ",
+ calib->main_cam_specific_calibration.native_sensor_resolution_height);
+ LOGD("CALIB sensor_resolution_width %d ",
+ calib->main_cam_specific_calibration.calibration_sensor_resolution_width);
+ LOGD("CALIB sensor_resolution_height %d ",
+ calib->main_cam_specific_calibration.calibration_sensor_resolution_height);
+ LOGD("CALIB focal_length_ratio %f ",
+ calib->main_cam_specific_calibration.focal_length_ratio);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : initBatchUpdate
+ *
+ * DESCRIPTION: init camera parameters buf entries
+ *
+ * PARAMETERS :
+ * @p_table : ptr to parameter buffer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initBatchUpdate(parm_buffer_t *p_table)
+{
+ m_tempMap.clear();
+
+ clear_metadata_buffer(p_table);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : commitSetBatch
+ *
+ * DESCRIPTION: commit all set parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitSetBatch()
+{
+ int32_t rc = NO_ERROR;
+ int32_t i = 0;
+
+ if (NULL == m_pParamBuf) {
+ LOGE("Params not initialized");
+ return NO_INIT;
+ }
+
+ /* Loop to check if at least one entry is valid */
+ for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+ if(m_pParamBuf->is_valid[i])
+ break;
+ }
+
+ if (NULL == m_pCamOpsTbl) {
+ LOGE("Ops not initialized");
+ return NO_INIT;
+ }
+
+ if (i < CAM_INTF_PARM_MAX) {
+ rc = m_pCamOpsTbl->ops->set_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+ }
+ if (rc == NO_ERROR) {
+ // commit change from temp storage into param map
+ rc = commitParamChanges();
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : commitGetBatch
+ *
+ * DESCRIPTION: commit all get parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitGetBatch()
+{
+ int32_t rc = NO_ERROR;
+ int32_t i = 0;
+
+ if (NULL == m_pParamBuf) {
+ LOGE("Params not initialized");
+ return NO_INIT;
+ }
+
+ /* Loop to check if at least one entry is valid */
+ for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+ if(m_pParamBuf->is_valid[i])
+ break;
+ }
+
+ if (NULL == m_pCamOpsTbl) {
+ LOGE("Ops not initialized");
+ return NO_INIT;
+ }
+
+ if (i < CAM_INTF_PARM_MAX) {
+ return m_pCamOpsTbl->ops->get_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+ } else {
+ return NO_ERROR;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : updateParamEntry
+ *
+ * DESCRIPTION: update a parameter entry in the local temp map obj
+ *
+ * PARAMETERS :
+ * @key : key of the entry
+ * @value : value of the entry
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParamEntry(const char *key, const char *value)
+{
+ m_tempMap.replaceValueFor(String8(key), String8(value));
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : commitParamChanges
+ *
+ * DESCRIPTION: commit all changes in local temp map obj into parameter obj
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParamChanges()
+{
+ size_t size = m_tempMap.size();
+ for (size_t i = 0; i < size; i++) {
+ String8 k, v;
+ k = m_tempMap.keyAt(i);
+ v = m_tempMap.valueAt(i);
+ set(k, v);
+ }
+ m_tempMap.clear();
+
+ // update local changes
+ m_bRecordingHint = m_bRecordingHint_new;
+ m_bZslMode = m_bZslMode_new;
+
+ /* After applying scene mode auto,
+ Camera effects need to be reapplied */
+ if ( m_bSceneTransitionAuto ) {
+ m_bUpdateEffects = true;
+ m_bSceneTransitionAuto = false;
+ }
+
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraReprocScaleParam
+ *
+ * DESCRIPTION: constructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraParameters::QCameraReprocScaleParam::QCameraReprocScaleParam()
+ : mScaleEnabled(false),
+ mIsUnderScaling(false),
+ mNeedScaleCnt(0),
+ mSensorSizeTblCnt(0),
+ mSensorSizeTbl(NULL),
+ mTotalSizeTblCnt(0)
+{
+ mPicSizeFromAPK.width = 0;
+ mPicSizeFromAPK.height = 0;
+ mPicSizeSetted.width = 0;
+ mPicSizeSetted.height = 0;
+ memset(mNeedScaledSizeTbl, 0, sizeof(mNeedScaledSizeTbl));
+ memset(mTotalSizeTbl, 0, sizeof(mTotalSizeTbl));
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraReprocScaleParam
+ *
+ * DESCRIPTION: destructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraParameters::QCameraReprocScaleParam::~QCameraReprocScaleParam()
+{
+ //do nothing now.
+}
+
+/*===========================================================================
+ * FUNCTION : setScaleSizeTbl
+ *
+ * DESCRIPTION: re-set picture size table with dimensions that need scaling if Reproc Scale is enabled
+ *
+ * PARAMETERS :
+ * @scale_cnt : count of picture sizes that want scale
+ * @scale_tbl : picture size table that want scale
+ * @org_cnt : sensor supported picture size count
+ * @org_tbl : sensor supported picture size table
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setScaleSizeTbl(size_t scale_cnt,
+ cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+ int32_t rc = NO_ERROR;
+ size_t i;
+ mNeedScaleCnt = 0;
+
+ if (!mScaleEnabled || scale_cnt <= 0 || scale_tbl == NULL || org_cnt <= 0 || org_tbl == NULL) {
+ return BAD_VALUE; // Scaling is not needed, so there is no need to reset the picture size table
+ }
+
+ mSensorSizeTblCnt = org_cnt;
+ mSensorSizeTbl = org_tbl;
+ mNeedScaleCnt = checkScaleSizeTable(scale_cnt, scale_tbl, org_cnt, org_tbl);
+ if(mNeedScaleCnt <= 0){
+ LOGE("do not have picture sizes need scaling.");
+ return BAD_VALUE;
+ }
+
+ if(mNeedScaleCnt + org_cnt > MAX_SIZES_CNT){
+ LOGE("picture size list exceed the max count.");
+ return BAD_VALUE;
+ }
+
+ //get the total picture size table
+ mTotalSizeTblCnt = mNeedScaleCnt + org_cnt;
+
+ if (mNeedScaleCnt > MAX_SCALE_SIZES_CNT) {
+ LOGE("Error!! mNeedScaleCnt (%d) is more than MAX_SCALE_SIZES_CNT",
+ mNeedScaleCnt);
+ return BAD_VALUE;
+ }
+
+ for(i = 0; i < mNeedScaleCnt; i++){
+ mTotalSizeTbl[i].width = mNeedScaledSizeTbl[i].width;
+ mTotalSizeTbl[i].height = mNeedScaledSizeTbl[i].height;
+ LOGH("scale picture size: i =%d, width=%d, height=%d.",
+ i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+ }
+ for(; i < mTotalSizeTblCnt; i++){
+ mTotalSizeTbl[i].width = org_tbl[i-mNeedScaleCnt].width;
+ mTotalSizeTbl[i].height = org_tbl[i-mNeedScaleCnt].height;
+ LOGH("sensor supportted picture size: i =%d, width=%d, height=%d.",
+ i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getScaleSizeTblCnt
+ *
+ * DESCRIPTION: get count of picture sizes that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : size_t count of picture sizes that need scaling
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::getScaleSizeTblCnt()
+{
+ return mNeedScaleCnt;
+}
+
+/*===========================================================================
+ * FUNCTION : getScaledSizeTbl
+ *
+ * DESCRIPTION: get the table of picture sizes that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraParameters::QCameraReprocScaleParam::getScaledSizeTbl()
+{
+ if(!mScaleEnabled)
+ return NULL;
+
+ return mNeedScaledSizeTbl;
+}
+
+/*===========================================================================
+ * FUNCTION : setScaleEnable
+ *
+ * DESCRIPTION: enable or disable Reproc Scale
+ *
+ * PARAMETERS :
+ * @enabled : enable: 1; disable 0
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::QCameraReprocScaleParam::setScaleEnable(bool enabled)
+{
+ mScaleEnabled = enabled;
+}
+
+/*===========================================================================
+ * FUNCTION : isScaleEnabled
+ *
+ * DESCRIPTION: check if Reproc Scale is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isScaleEnabled()
+{
+ return mScaleEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION : isScalePicSize
+ *
+ * DESCRIPTION: check if current picture size is from Scale Table
+ *
+ * PARAMETERS :
+ * @width : current picture width
+ * @height : current picture height
+ *
+ * RETURN : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isScalePicSize(int width, int height)
+{
+ //Check if the picture size is in scale table
+ if(mNeedScaleCnt <= 0)
+ return FALSE;
+
+ for (size_t i = 0; i < mNeedScaleCnt; i++) {
+ if ((mNeedScaledSizeTbl[i].width == width) && (mNeedScaledSizeTbl[i].height == height)) {
+ //found match
+ return TRUE;
+ }
+ }
+
+ LOGE("Not in scale picture size table.");
+ return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION : isValidatePicSize
+ *
+ * DESCRIPTION: check if current picture size is valid
+ *
+ * PARAMETERS :
+ * @width : current picture width
+ * @height : current picture height
+ *
+ * RETURN : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isValidatePicSize(int width, int height)
+{
+ size_t i = 0;
+
+ for(i = 0; i < mSensorSizeTblCnt; i++){
+ if(mSensorSizeTbl[i].width == width
+ && mSensorSizeTbl[i].height== height){
+ return TRUE;
+ }
+ }
+
+ for(i = 0; i < mNeedScaleCnt; i++){
+ if(mNeedScaledSizeTbl[i].width == width
+ && mNeedScaledSizeTbl[i].height== height){
+ return TRUE;
+ }
+ }
+
+ LOGE("Invalidate input picture size.");
+ return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION : setSensorSupportedPicSize
+ *
+ * DESCRIPTION: set sensor supported picture size.
+ * For Snapshot stream size configuration, we need to use a sensor supported size.
+ * We will use CPP to do Scaling based on output Snapshot stream.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setSensorSupportedPicSize()
+{
+ //will find a suitable picture size (here we leave a possibility to add other scale requirements)
+ //Currently we only focus on upscaling, and checkScaleSizeTable() has guaranteed the dimension ratio.
+
+ if(!mIsUnderScaling || mSensorSizeTblCnt <= 0)
+ return BAD_VALUE;
+
+ //We just get the max sensor supported size here.
+ mPicSizeSetted.width = mSensorSizeTbl[0].width;
+ mPicSizeSetted.height = mSensorSizeTbl[0].height;
+
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : setValidatePicSize
+ *
+ * DESCRIPTION: set sensor supported size and change scale status.
+ *
+ * PARAMETERS :
+ * @width : input picture width
+ * @height : input picture height
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setValidatePicSize(int &width,int &height)
+{
+ if(!mScaleEnabled)
+ return BAD_VALUE;
+
+ mIsUnderScaling = FALSE; //default: not under scale
+
+ if(isScalePicSize(width, height)){
+ // input picture size needs scaling. Record the size from the APK and the size actually set
+ mIsUnderScaling = TRUE;
+ mPicSizeFromAPK.width = width;
+ mPicSizeFromAPK.height = height;
+
+ if(setSensorSupportedPicSize() != NO_ERROR)
+ return BAD_VALUE;
+
+ //re-set picture size to sensor supported size
+ width = mPicSizeSetted.width;
+ height = mPicSizeSetted.height;
+ LOGH("mPicSizeFromAPK- with=%d, height=%d, mPicSizeSetted- with =%d, height=%d.",
+ mPicSizeFromAPK.width, mPicSizeFromAPK.height, mPicSizeSetted.width, mPicSizeSetted.height);
+ }else{
+ mIsUnderScaling = FALSE;
+ //no scale is needed for input picture size
+ if(!isValidatePicSize(width, height)){
+ LOGE("invalidate input picture size.");
+ return BAD_VALUE;
+ }
+ mPicSizeSetted.width = width;
+ mPicSizeSetted.height = height;
+ }
+
+ LOGH("X. mIsUnderScaling=%d, width=%d, height=%d.", mIsUnderScaling, width, height);
+ return NO_ERROR;
+}
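+
+/*===========================================================================
+ * ILLUSTRATION : size substitution in setValidatePicSize
+ *
+ * Editor's sketch, not part of the HAL: the decision above rewritten with
+ * plain structs instead of cam_dimension_t. It assumes the sensor table is
+ * non-empty and sorted with the largest size first, as the code above relies
+ * on. Guarded with #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+#include <vector>
+
+struct Dim { int width; int height; };
+
+// Returns true if (w, h) had to be replaced by the max sensor size.
+static bool resolvePictureSize(int &w, int &h,
+        const std::vector<Dim> &scaleTbl,
+        const std::vector<Dim> &sensorTbl)
+{
+    if (sensorTbl.empty()) {
+        return false;
+    }
+    for (const Dim &d : scaleTbl) {
+        if (d.width == w && d.height == h) {
+            // Requested size needs reprocess scaling: stream at the largest
+            // sensor size and upscale later in post processing.
+            w = sensorTbl.front().width;
+            h = sensorTbl.front().height;
+            return true;
+        }
+    }
+    return false; // size is streamed as requested (if the sensor supports it)
+}
+#endif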
+
+/*===========================================================================
+ * FUNCTION : getPicSizeFromAPK
+ *
+ * DESCRIPTION: get picture size received from the APK
+ *
+ * PARAMETERS :
+ * @width : returned picture width
+ * @height : returned picture height
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::getPicSizeFromAPK(int &width, int &height)
+{
+ if(!mIsUnderScaling)
+ return BAD_VALUE;
+
+ width = mPicSizeFromAPK.width;
+ height = mPicSizeFromAPK.height;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getPicSizeSetted
+ *
+ * DESCRIPTION: get picture size that was set into mm-camera
+ *
+ * PARAMETERS :
+ * @width : returned picture width
+ * @height : returned picture height
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::getPicSizeSetted(int &width, int &height)
+{
+ width = mPicSizeSetted.width;
+ height = mPicSizeSetted.height;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : isUnderScaling
+ *
+ * DESCRIPTION: check if we are under a Reproc Scaling requirement
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isUnderScaling()
+{
+ return mIsUnderScaling;
+}
+
+/*===========================================================================
+ * FUNCTION : checkScaleSizeTable
+ *
+ * DESCRIPTION: validate the requested scale size table against the sensor size
+ * table and keep only the entries that can actually be scaled
+ *
+ * PARAMETERS :
+ * @scale_cnt : count of picture sizes that need scaling
+ * @scale_tbl : picture size table that needs scaling
+ * @org_cnt : sensor supported picture size count
+ * @org_tbl : sensor supported picture size table
+ *
+ * RETURN : size_t count of valid scale sizes
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::checkScaleSizeTable(size_t scale_cnt,
+ cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+ size_t stbl_cnt = 0;
+ size_t temp_cnt = 0;
+ ssize_t i = 0;
+ if(scale_cnt <=0 || scale_tbl == NULL || org_tbl == NULL || org_cnt <= 0)
+ return stbl_cnt;
+
+ //get valid scale size table. Currently we only support:
+ // 1. upscale. The scale size must be larger than the max sensor supported size
+ // 2. The scale dimension ratio must be the same as that of the max sensor supported size.
+ temp_cnt = scale_cnt;
+ for (i = (ssize_t)(scale_cnt - 1); i >= 0; i--) {
+ if (scale_tbl[i].width > org_tbl[0].width ||
+ (scale_tbl[i].width == org_tbl[0].width &&
+ scale_tbl[i].height > org_tbl[0].height)) {
+ //get the smallest scale size
+ break;
+ }
+ temp_cnt--;
+ }
+
+ //check dimension ratio
+ double supported_ratio = (double)org_tbl[0].width / (double)org_tbl[0].height;
+ for (i = 0; i < (ssize_t)temp_cnt; i++) {
+ double cur_ratio = (double)scale_tbl[i].width / (double)scale_tbl[i].height;
+ if (fabs(supported_ratio - cur_ratio) > ASPECT_TOLERANCE) {
+ continue;
+ }
+ mNeedScaledSizeTbl[stbl_cnt].width = scale_tbl[i].width;
+ mNeedScaledSizeTbl[stbl_cnt].height= scale_tbl[i].height;
+ stbl_cnt++;
+ }
+
+ return stbl_cnt;
+}
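+
+/*===========================================================================
+ * ILLUSTRATION : aspect-ratio filter in checkScaleSizeTable
+ *
+ * Editor's sketch, not part of the HAL: the ratio check above in isolation.
+ * The tolerance value here is a stand-in for ASPECT_TOLERANCE, whose actual
+ * value is defined elsewhere in this file. Guarded with #if 0 so it never
+ * affects the build.
+ *==========================================================================*/
+#if 0
+#include <cmath>
+
+static bool sameAspectRatio(int w1, int h1, int w2, int h2)
+{
+    const double kTolerance = 0.001; // stand-in for ASPECT_TOLERANCE
+    double r1 = (double)w1 / (double)h1;
+    double r2 = (double)w2 / (double)h2;
+    // Sizes pass the filter only when their ratios match within tolerance.
+    return std::fabs(r1 - r2) <= kTolerance;
+}
+#endif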
+
+/*===========================================================================
+ * FUNCTION : getTotalSizeTblCnt
+ *
+ * DESCRIPTION: get total picture size count after adding dimensions that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : size_t type of picture size count
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::getTotalSizeTblCnt()
+{
+ return mTotalSizeTblCnt;
+}
+
+/*===========================================================================
+ * FUNCTION : getTotalSizeTbl
+ *
+ * DESCRIPTION: get picture size table after adding dimensions that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraParameters::QCameraReprocScaleParam::getTotalSizeTbl()
+{
+ if(!mScaleEnabled)
+ return NULL;
+
+ return mTotalSizeTbl;
+}
+
+/*===========================================================================
+ * FUNCTION : setEztune
+ *
+ * DESCRIPTION: Enable/Disable EZtune based on the persist.camera.eztune.enable setprop
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *==========================================================================*/
+int32_t QCameraParameters::setEztune()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.eztune.enable", prop, "0");
+ m_bEztuneEnabled = atoi(prop);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : isHDREnabled
+ *
+ * DESCRIPTION: if HDR is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCameraParameters::isHDREnabled()
+{
+ return ((m_bHDREnabled || m_HDRSceneEnabled));
+}
+
+/*===========================================================================
+ * FUNCTION : isAVTimerEnabled
+ *
+ * DESCRIPTION: if AVTimer is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCameraParameters::isAVTimerEnabled()
+{
+ return m_bAVTimerEnabled;
+}
+
+/*===========================================================================
+* FUNCTION : isDISEnabled
+*
+* DESCRIPTION: if DIS is enabled
+*
+* PARAMETERS : none
+*
+* RETURN : true: needed
+* false: no need
+*==========================================================================*/
+bool QCameraParameters::isDISEnabled()
+{
+ return m_bDISEnabled;
+}
+
+/*===========================================================================
+* FUNCTION : getISType
+*
+* DESCRIPTION: returns IS type
+*
+* PARAMETERS : none
+*
+* RETURN : IS type
+*
+*==========================================================================*/
+cam_is_type_t QCameraParameters::getISType()
+{
+ return mIsType;
+}
+
+/*===========================================================================
+* FUNCTION : getPreviewISType
+*
+* DESCRIPTION: returns IS type for preview
+*
+* PARAMETERS : none
+*
+* RETURN : IS type
+*
+*==========================================================================*/
+cam_is_type_t QCameraParameters::getPreviewISType()
+{
+ return mIsTypePreview;
+}
+
+/*===========================================================================
+ * FUNCTION : getMobicatMask
+ *
+ * DESCRIPTION: returns mobicat mask
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : mobicat mask
+ *
+ *==========================================================================*/
+uint8_t QCameraParameters::getMobicatMask()
+{
+ return m_MobiMask;
+}
+
+/*===========================================================================
+ * FUNCTION : sendStreamConfigInfo
+ *
+ * DESCRIPTION: send Stream config info.
+ *
+ * PARAMETERS :
+ * @stream_config_info: Stream config information
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+bool QCameraParameters::sendStreamConfigInfo(cam_stream_size_info_t &stream_config_info) {
+ int32_t rc = NO_ERROR;
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_META_STREAM_INFO, stream_config_info)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set stream info parm");
+ return rc;
+ }
+
+ return rc;
+}
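+
+/*===========================================================================
+ * ILLUSTRATION : batch update pattern
+ *
+ * Editor's sketch, not part of the HAL: the initBatchUpdate /
+ * ADD_SET_PARAM_ENTRY_TO_BATCH / commitSetBatch sequence that this file uses
+ * for every parameter push, mirrored with a hypothetical simplified batch
+ * structure. Guarded with #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+#include <cstring>
+
+struct FakeBatch {
+    bool valid[8];
+    int value[8];
+};
+
+static int pushOneParam(FakeBatch &batch, int paramId, int paramValue)
+{
+    if (paramId < 0 || paramId >= 8) {
+        return -1; // out of range, mirrors a BAD_VALUE style failure
+    }
+    // 1. initBatchUpdate(): start from a clean batch.
+    std::memset(&batch, 0, sizeof(batch));
+    // 2. ADD_SET_PARAM_ENTRY_TO_BATCH(): stage the entry and mark it valid.
+    batch.value[paramId] = paramValue;
+    batch.valid[paramId] = true;
+    // 3. commitSetBatch(): hand the batch to the backend (omitted here).
+    return 0; // NO_ERROR
+}
+#endif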
+
+/*===========================================================================
+ * FUNCTION : setStreamConfigure
+ *
+ * DESCRIPTION: set stream type, stream dimension for all configured streams.
+ *
+ * PARAMETERS :
+ * @isCapture: Whether this configuration is for an image capture
+ * @previewAsPostview: Use preview as postview
+ * @resetConfig: Whether to reset the current stream configuration
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+bool QCameraParameters::setStreamConfigure(bool isCapture,
+ bool previewAsPostview, bool resetConfig) {
+
+ int32_t rc = NO_ERROR;
+ cam_stream_size_info_t stream_config_info;
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+ bool raw_capture = false;
+
+ if ( m_pParamBuf == NULL ) {
+ return NO_INIT;
+ }
+
+ memset(&stream_config_info, 0, sizeof(stream_config_info));
+ stream_config_info.num_streams = 0;
+
+ if (m_bStreamsConfigured) {
+ LOGH("Reset stream config!!");
+ rc = sendStreamConfigInfo(stream_config_info);
+ m_bStreamsConfigured = false;
+ }
+ if (resetConfig) {
+ LOGH("Done Resetting stream config!!");
+ return rc;
+ }
+
+ stream_config_info.hfr_mode = static_cast<cam_hfr_mode_t>(mHfrMode);
+ stream_config_info.buf_alignment = m_pCapability->buf_alignment;
+ stream_config_info.min_stride = m_pCapability->min_stride;
+ stream_config_info.min_scanline = m_pCapability->min_scanline;
+ stream_config_info.batch_size = getBufBatchCount();
+
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+
+ if (isZSLMode() && getRecordingHintValue() != true) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_PREVIEW;
+ getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_ANALYSIS;
+ updatePpFeatureMask(CAM_STREAM_TYPE_ANALYSIS);
+ getStreamDimension(CAM_STREAM_TYPE_ANALYSIS,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
+ getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_SNAPSHOT;
+ getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+ getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+
+ if (isUBWCEnabled() && getRecordingHintValue() != true) {
+ cam_format_t fmt;
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+ if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_CALLBACK;
+ getStreamDimension(CAM_STREAM_TYPE_CALLBACK,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_CALLBACK);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_CALLBACK];
+ getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+ }
+ }
+
+ } else if (!isCapture) {
+ if (m_bRecordingHint) {
+ if (m_bDISEnabled) {
+ char value[PROPERTY_VALUE_MAX];
+ // Read IS type for video from setprop (default "4")
+ property_get("persist.camera.is_type", value, "4");
+ mIsType = static_cast<cam_is_type_t>(atoi(value));
+ // Read IS type for preview from setprop (default "4")
+ property_get("persist.camera.is_type_preview", value, "4");
+ mIsTypePreview = static_cast<cam_is_type_t>(atoi(value));
+ } else {
+ mIsType = IS_TYPE_NONE;
+ mIsTypePreview = IS_TYPE_NONE;
+ }
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_SNAPSHOT;
+ getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+ getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_VIDEO;
+ getStreamDimension(CAM_STREAM_TYPE_VIDEO,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_VIDEO);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_VIDEO];
+ getStreamFormat(CAM_STREAM_TYPE_VIDEO,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+ }
+
+ /* Analysis stream is needed by DCRF regardless of recording hint */
+ if ((getDcrf() == true) ||
+ (getRecordingHintValue() != true) ||
+ (fdModeInVideo())) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_ANALYSIS;
+ updatePpFeatureMask(CAM_STREAM_TYPE_ANALYSIS);
+ getStreamDimension(CAM_STREAM_TYPE_ANALYSIS,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
+ getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+ }
+
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_PREVIEW;
+ getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsTypePreview;
+ stream_config_info.num_streams++;
+
+ if (isUBWCEnabled() && getRecordingHintValue() != true) {
+ cam_format_t fmt;
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+ if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_CALLBACK;
+ getStreamDimension(CAM_STREAM_TYPE_CALLBACK,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_CALLBACK);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_CALLBACK];
+ getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.num_streams++;
+ }
+ }
+
+ } else {
+ if (isJpegPictureFormat() || isNV16PictureFormat() || isNV21PictureFormat()) {
+ if (!getofflineRAW()) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_SNAPSHOT;
+ getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+ getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.num_streams++;
+ }
+
+ if (previewAsPostview) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_PREVIEW;
+ getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+ getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.num_streams++;
+ } else if(!getQuadraCfa()) {
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_POSTVIEW;
+ getStreamDimension(CAM_STREAM_TYPE_POSTVIEW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_POSTVIEW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_POSTVIEW];
+ getStreamFormat(CAM_STREAM_TYPE_POSTVIEW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.num_streams++;
+ }
+ } else {
+ raw_capture = true;
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_RAW;
+ getStreamDimension(CAM_STREAM_TYPE_RAW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_RAW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_RAW];
+ getStreamFormat(CAM_STREAM_TYPE_RAW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+ stream_config_info.num_streams++;
+ }
+ }
+
+ if ((!raw_capture) && ((getofflineRAW() && !getRecordingHintValue())
+ || (raw_yuv))) {
+ cam_dimension_t max_dim = {0,0};
+
+ if (!getQuadraCfa()) {
+ // Find the maximum dimension among all the streams
+ for (uint32_t j = 0; j < stream_config_info.num_streams; j++) {
+ if (stream_config_info.stream_sizes[j].width > max_dim.width) {
+ max_dim.width = stream_config_info.stream_sizes[j].width;
+ }
+ if (stream_config_info.stream_sizes[j].height > max_dim.height) {
+ max_dim.height = stream_config_info.stream_sizes[j].height;
+ }
+ }
+ } else {
+ max_dim.width = m_pCapability->quadra_cfa_dim[0].width;
+ max_dim.height = m_pCapability->quadra_cfa_dim[0].height;
+ }
+ LOGH("Max Dimension = %d X %d", max_dim.width, max_dim.height);
+ updateRAW(max_dim);
+ stream_config_info.type[stream_config_info.num_streams] =
+ CAM_STREAM_TYPE_RAW;
+ getStreamDimension(CAM_STREAM_TYPE_RAW,
+ stream_config_info.stream_sizes[stream_config_info.num_streams]);
+ updatePpFeatureMask(CAM_STREAM_TYPE_RAW);
+ stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+ mStreamPpMask[CAM_STREAM_TYPE_RAW];
+ getStreamFormat(CAM_STREAM_TYPE_RAW,
+ stream_config_info.format[stream_config_info.num_streams]);
+ stream_config_info.num_streams++;
+ }
+ for (uint32_t k = 0; k < stream_config_info.num_streams; k++) {
+ LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx Format = %d",
+ stream_config_info.type[k],
+ stream_config_info.stream_sizes[k].width,
+ stream_config_info.stream_sizes[k].height,
+ stream_config_info.postprocess_mask[k],
+ stream_config_info.format[k]);
+ }
+
+ rc = sendStreamConfigInfo(stream_config_info);
+ m_bStreamsConfigured = true;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : addOnlineRotation
+ *
+ * DESCRIPTION: send additional rotation information for specific stream
+ *
+ * PARAMETERS :
+ * @rotation: rotation
+ * @streamId: internal stream id
+ * @device_rotation: device rotation
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::addOnlineRotation(uint32_t rotation, uint32_t streamId,
+ int32_t device_rotation)
+{
+ int32_t rc = NO_ERROR;
+ cam_rotation_info_t rotation_info;
+ memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
+
+ /* Add jpeg rotation information */
+ if (rotation == 0) {
+ rotation_info.rotation = ROTATE_0;
+ } else if (rotation == 90) {
+ rotation_info.rotation = ROTATE_90;
+ } else if (rotation == 180) {
+ rotation_info.rotation = ROTATE_180;
+ } else if (rotation == 270) {
+ rotation_info.rotation = ROTATE_270;
+ } else {
+ rotation_info.rotation = ROTATE_0;
+ }
+ rotation_info.streamId = streamId;
+
+ /* Add device rotation information */
+ if (device_rotation == 0) {
+ rotation_info.device_rotation = ROTATE_0;
+ } else if (device_rotation == 90) {
+ rotation_info.device_rotation = ROTATE_90;
+ } else if (device_rotation == 180) {
+ rotation_info.device_rotation = ROTATE_180;
+ } else if (device_rotation == 270) {
+ rotation_info.device_rotation = ROTATE_270;
+ } else {
+ rotation_info.device_rotation = ROTATE_0;
+ }
+
+ if(initBatchUpdate(m_pParamBuf) < 0 ) {
+ LOGE("Failed to initialize group update table");
+ return BAD_TYPE;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ROTATION, rotation_info)) {
+ LOGE("Failed to update table");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to set stream info parm");
+ return rc;
+ }
+
+ return rc;
+}
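+
+/*===========================================================================
+ * ILLUSTRATION : rotation mapping in addOnlineRotation
+ *
+ * Editor's sketch, not part of the HAL: the degree-to-enum mapping above,
+ * using a local enum instead of the real rotation constants. Guarded with
+ * #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+enum class Rot { R0, R90, R180, R270 };
+
+static Rot degreesToRotation(int degrees)
+{
+    switch (degrees) {
+    case 90:  return Rot::R90;
+    case 180: return Rot::R180;
+    case 270: return Rot::R270;
+    default:  return Rot::R0; // 0 and any unexpected value map to ROTATE_0
+    }
+}
+#endif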
+
+/*===========================================================================
+ * FUNCTION : needThumbnailReprocess
+ *
+ * DESCRIPTION: Check if thumbnail reprocessing is needed
+ *
+ * PARAMETERS : @pFeatureMask - feature mask
+ *
+ * RETURN : true: needed
+ * false: no need
+ *==========================================================================*/
+bool QCameraParameters::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
+{
+ if (isUbiFocusEnabled() || isChromaFlashEnabled() ||
+ isOptiZoomEnabled() || isUbiRefocus() ||
+ isStillMoreEnabled() ||
+ (isHDREnabled() && !isHDRThumbnailProcessNeeded())
+ || isUBWCEnabled()|| getQuadraCfa()) {
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_REFOCUS;
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_STILLMORE;
+ *pFeatureMask &= ~CAM_QCOM_FEATURE_HDR;
+ return false;
+ } else {
+ cam_dimension_t thumb_dim;
+ getThumbnailSize(&(thumb_dim.width), &(thumb_dim.height));
+ if (thumb_dim.width == 0 || thumb_dim.height == 0) {
+ return false;
+ }
+ else {
+ return true;
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfExtraBuffersForImageProc
+ *
+ * DESCRIPTION: get number of extra input buffers needed by image processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of extra buffers needed by ImageProc;
+ * 0 if not ImageProc enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForImageProc()
+{
+ int numOfBufs = 0;
+
+ if (isUbiRefocus()) {
+ return (uint8_t)(m_pCapability->refocus_af_bracketing_need.burst_count - 1);
+ } else if (isUbiFocusEnabled()) {
+ numOfBufs += m_pCapability->ubifocus_af_bracketing_need.burst_count - 1;
+ } else if (m_bOptiZoomOn) {
+ numOfBufs += m_pCapability->opti_zoom_settings_need.burst_count - 1;
+ } else if (isChromaFlashEnabled()) {
+ numOfBufs += m_pCapability->chroma_flash_settings_need.burst_count - 1;
+ } else if (isStillMoreEnabled()) {
+ if (isSeeMoreEnabled()) {
+ m_stillmore_config.burst_count = 1;
+ } else if ((m_stillmore_config.burst_count >=
+ m_pCapability->stillmore_settings_need.min_burst_count) &&
+ (m_stillmore_config.burst_count <=
+ m_pCapability->stillmore_settings_need.max_burst_count)) {
+ numOfBufs += m_stillmore_config.burst_count - 1;
+ } else {
+ numOfBufs += m_pCapability->stillmore_settings_need.burst_count - 1;
+ }
+ } else if (isOEMFeatEnabled()) {
+ numOfBufs += 1;
+ }
+
+ if (getQuadraCfa()) {
+ numOfBufs += 1;
+ }
+
+ return (uint8_t)(numOfBufs);
+}
+
+/*===========================================================================
+ * FUNCTION : getExifBufIndex
+ *
+ * DESCRIPTION: get index of metadata to be used for EXIF
+ *
+ * PARAMETERS : @captureIndex - index of current captured frame
+ *
+ * RETURN : index of metadata to be used for EXIF
+ *==========================================================================*/
+uint32_t QCameraParameters::getExifBufIndex(uint32_t captureIndex)
+{
+ uint32_t index = captureIndex;
+
+ if (isUbiRefocus()) {
+ if (captureIndex < m_pCapability->refocus_af_bracketing_need.burst_count) {
+ index = captureIndex;
+ } else {
+ index = 0;
+ }
+ } else if (isChromaFlashEnabled()) {
+ index = m_pCapability->chroma_flash_settings_need.metadata_index;
+ } else if (isHDREnabled()) {
+ if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+ index = m_pCapability->hdr_bracketing_setting.num_frames;
+ } else {
+ for (index = 0; index < m_pCapability->hdr_bracketing_setting.num_frames; index++) {
+ if (0 == m_pCapability->hdr_bracketing_setting.exp_val.values[index]) {
+ break;
+ }
+ }
+ if (index == m_pCapability->hdr_bracketing_setting.num_frames) {
+ index = captureIndex;
+ }
+ }
+ }
+
+ return index;
+}
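+
+/*===========================================================================
+ * ILLUSTRATION : zero-EV selection in getExifBufIndex
+ *
+ * Editor's sketch, not part of the HAL: the HDR branch above prefers the
+ * metadata of the 0 EV frame of the bracket if one exists, otherwise it
+ * falls back to the captured frame's index. Guarded with #if 0 so it never
+ * affects the build.
+ *==========================================================================*/
+#if 0
+#include <cstdint>
+
+static uint32_t pickExifIndex(const int *expValues, uint32_t numFrames,
+        uint32_t captureIndex)
+{
+    for (uint32_t i = 0; i < numFrames; i++) {
+        if (expValues[i] == 0) {
+            return i; // metadata of the 0 EV frame
+        }
+    }
+    return captureIndex; // no 0 EV frame in the bracket
+}
+#endif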
+
+/*===========================================================================
+ * FUNCTION : getNumberInBufsForSingleShot
+ *
+ * DESCRIPTION: get number of input buffers for single shot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of input buffers for single shot
+ *==========================================================================*/
+uint32_t QCameraParameters::getNumberInBufsForSingleShot()
+{
+ uint32_t numOfBufs = 1;
+
+ if (isUbiRefocus()) {
+ numOfBufs = m_pCapability->refocus_af_bracketing_need.burst_count;
+ } else if (isUbiFocusEnabled()) {
+ numOfBufs = m_pCapability->ubifocus_af_bracketing_need.burst_count;
+ } else if (m_bOptiZoomOn) {
+ numOfBufs = m_pCapability->opti_zoom_settings_need.burst_count;
+ } else if (isChromaFlashEnabled()) {
+ numOfBufs = m_pCapability->chroma_flash_settings_need.burst_count;
+ } else if (isHDREnabled()) {
+ numOfBufs = m_pCapability->hdr_bracketing_setting.num_frames;
+ if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+ numOfBufs++;
+ }
+ } else if (isStillMoreEnabled()) {
+ if (isSeeMoreEnabled()) {
+ m_stillmore_config.burst_count = 1;
+ numOfBufs = m_stillmore_config.burst_count;
+ } else if ((m_stillmore_config.burst_count >=
+ m_pCapability->stillmore_settings_need.min_burst_count) &&
+ (m_stillmore_config.burst_count <=
+ m_pCapability->stillmore_settings_need.max_burst_count)) {
+ numOfBufs = m_stillmore_config.burst_count;
+ } else {
+ numOfBufs = m_pCapability->stillmore_settings_need.burst_count;
+ }
+ }
+
+ return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumberOutBufsForSingleShot
+ *
+ * DESCRIPTION: get number of output buffers for single shot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of output buffers for single shot
+ *==========================================================================*/
+uint32_t QCameraParameters::getNumberOutBufsForSingleShot()
+{
+ uint32_t numOfBufs = 1;
+
+ if (isUbiRefocus()) {
+ numOfBufs = m_pCapability->refocus_af_bracketing_need.output_count;
+ } else if (isHDREnabled()) {
+ if (isHDR1xFrameEnabled()) {
+ numOfBufs++;
+ }
+ }
+
+ return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION : is4k2kVideoResolution
+ *
+ * DESCRIPTION: if resolution is 4k x 2k or true 4k x 2k
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: video resolution is 4k x 2k
+ * false: video resolution is not 4k x 2k
+ *==========================================================================*/
+bool QCameraParameters::is4k2kVideoResolution()
+{
+ bool enabled = false;
+ cam_dimension_t resolution;
+ getVideoSize(&resolution.width, &resolution.height);
+ if (!(resolution.width < 3840 && resolution.height < 2160)) {
+ enabled = true;
+ }
+
+ return enabled;
+}
+
+/*===========================================================================
+ * FUNCTION : isPreviewSeeMoreRequired
+ *
+ * DESCRIPTION: This function checks whether SeeMore (SW TNR) needs to be applied for the
+ * preview stream depending on video resolution and setprop
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : true: If SeeMore needs to apply
+ * false: No need to apply
+ *==========================================================================*/
+bool QCameraParameters::isPreviewSeeMoreRequired()
+{
+ cam_dimension_t dim;
+ char prop[PROPERTY_VALUE_MAX];
+
+ getVideoSize(&dim.width, &dim.height);
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.preview.seemore", prop, "0");
+ int enable = atoi(prop);
+
+ // Enable SeeMore for preview stream if :
+ // 1. Video resolution <= (1920x1080) (or)
+ // 2. persist.camera.preview.seemore is set
+ LOGD("width=%d, height=%d, enable=%d", dim.width, dim.height, enable);
+ return (((dim.width * dim.height) <= (1920 * 1080)) || enable);
+}
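+
+/*===========================================================================
+ * ILLUSTRATION : resolution gate in isPreviewSeeMoreRequired
+ *
+ * Editor's sketch, not part of the HAL: SeeMore is applied to preview when
+ * the video size is at most 1080p, or when the override property is set.
+ * Guarded with #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+static bool previewSeeMoreRequired(int width, int height, bool forceEnable)
+{
+    const long kMaxPixels = 1920L * 1080L;
+    // Pixel-count comparison, same as (width * height) <= (1920 * 1080) above.
+    return ((long)width * (long)height <= kMaxPixels) || forceEnable;
+}
+#endif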
+
+/*===========================================================================
+ * FUNCTION : updateDebugLevel
+ *
+ * DESCRIPTION: send CAM_INTF_PARM_UPDATE_DEBUG_LEVEL to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : NO_ERROR --success
+ * int32_t type of status
+ *==========================================================================*/
+int32_t QCameraParameters::updateDebugLevel()
+{
+ if ( m_pParamBuf == NULL ) {
+ return NO_INIT;
+ }
+
+ int32_t rc = initBatchUpdate(m_pParamBuf);
+ if ( rc != NO_ERROR ) {
+ LOGE("Failed to initialize group update table");
+ return rc;
+ }
+
+ uint32_t dummyDebugLevel = 0;
+ /* The value of dummyDebugLevel is irrelevant. On
+ * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, dummyDebugLevel)) {
+ LOGE("Parameters batch failed");
+ return BAD_VALUE;
+ }
+
+ rc = commitSetBatch();
+ if ( rc != NO_ERROR ) {
+ LOGE("Failed to commit batch parameters");
+ return rc;
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setOfflineRAW
+ *
+ * DESCRIPTION: Function to decide Offline RAW feature.
+ *
+ * PARAMETERS :
+ * @raw_value: offline raw value to set.
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraParameters::setOfflineRAW(bool raw_value)
+{
+ char value[PROPERTY_VALUE_MAX];
+ bool raw_yuv = false;
+ bool offlineRaw = false;
+
+ if (raw_value) {
+ mOfflineRAW = true;
+ LOGH("Offline Raw %d", mOfflineRAW);
+ return;
+ }
+
+ property_get("persist.camera.raw_yuv", value, "0");
+ raw_yuv = atoi(value) > 0 ? true : false;
+ property_get("persist.camera.offlineraw", value, "0");
+ offlineRaw = atoi(value) > 0 ? true : false;
+ if ((raw_yuv || isRdiMode()) && offlineRaw) {
+ mOfflineRAW = true;
+ } else {
+ mOfflineRAW = false;
+ }
+ LOGH("Offline Raw %d", mOfflineRAW);
+}
+
+/*===========================================================================
+ * FUNCTION : updatePpFeatureMask
+ *
+ * DESCRIPTION: Updates the feature mask for a particular stream depending
+ * on current client configuration.
+ *
+ * PARAMETERS :
+ * @stream_type: Camera stream type
+ *
+ * RETURN : NO_ERROR --success
+ * int32_t type of status
+ *==========================================================================*/
+int32_t QCameraParameters::updatePpFeatureMask(cam_stream_type_t stream_type) {
+
+ cam_feature_mask_t feature_mask = 0;
+
+ if (stream_type >= CAM_STREAM_TYPE_MAX) {
+ LOGE("Error!! stream type: %d not valid", stream_type);
+ return -1;
+ }
+
+ // Update feature mask for SeeMore in video and video preview
+ if (isSeeMoreEnabled() && ((stream_type == CAM_STREAM_TYPE_VIDEO) ||
+ (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue() &&
+ isPreviewSeeMoreRequired()))) {
+ feature_mask |= CAM_QCOM_FEATURE_LLVD;
+ }
+
+ if (isHighQualityNoiseReductionMode() &&
+ ((stream_type == CAM_STREAM_TYPE_VIDEO) ||
+ (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue() &&
+ isPreviewSeeMoreRequired()))) {
+ feature_mask |= CAM_QTI_FEATURE_SW_TNR;
+ }
+
+ // Do not enable feature mask for ZSL/non-ZSL/liveshot snapshot except for 4K2k case
+ if ((getRecordingHintValue() &&
+ (stream_type == CAM_STREAM_TYPE_SNAPSHOT) && is4k2kVideoResolution()) ||
+ (stream_type != CAM_STREAM_TYPE_SNAPSHOT)) {
+ if ((m_nMinRequiredPpMask & CAM_QCOM_FEATURE_SHARPNESS) &&
+ !isOptiZoomEnabled()) {
+ feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+ }
+
+ if (m_nMinRequiredPpMask & CAM_QCOM_FEATURE_EFFECT) {
+ feature_mask |= CAM_QCOM_FEATURE_EFFECT;
+ }
+ if (isWNREnabled()) {
+ feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+ }
+
+ //Set flip mode based on Stream type;
+ int flipMode = getFlipMode(stream_type);
+ if (flipMode > 0) {
+ feature_mask |= CAM_QCOM_FEATURE_FLIP;
+ }
+ }
+
+ if ((isTNRVideoEnabled() && (CAM_STREAM_TYPE_VIDEO == stream_type))
+ || (isTNRPreviewEnabled() && (CAM_STREAM_TYPE_PREVIEW == stream_type))) {
+ feature_mask |= CAM_QCOM_FEATURE_CPP_TNR;
+ }
+ if (isEztuneEnabled() &&
+ ((CAM_STREAM_TYPE_PREVIEW == stream_type) ||
+ (CAM_STREAM_TYPE_SNAPSHOT == stream_type))) {
+ feature_mask |= CAM_QCOM_FEATURE_EZTUNE;
+ }
+
+ if ((getCDSMode() != CAM_CDS_MODE_OFF) &&
+ ((CAM_STREAM_TYPE_PREVIEW == stream_type) ||
+ (CAM_STREAM_TYPE_VIDEO == stream_type) ||
+ (CAM_STREAM_TYPE_CALLBACK == stream_type) ||
+ ((CAM_STREAM_TYPE_SNAPSHOT == stream_type) &&
+ getRecordingHintValue() && is4k2kVideoResolution()))) {
+ if (m_nMinRequiredPpMask & CAM_QCOM_FEATURE_DSDN) {
+ feature_mask |= CAM_QCOM_FEATURE_DSDN;
+ } else {
+ feature_mask |= CAM_QCOM_FEATURE_CDS;
+ }
+ }
+
+ if (isTNRSnapshotEnabled() && (CAM_STREAM_TYPE_SNAPSHOT == stream_type)
+ && (isZSLMode() || getRecordingHintValue())) {
+ feature_mask |= CAM_QCOM_FEATURE_CPP_TNR;
+ }
+
+ //Rotation could also have an effect on pp feature mask
+ cam_pp_feature_config_t config;
+ cam_dimension_t dim;
+ memset(&config, 0, sizeof(cam_pp_feature_config_t));
+ getStreamRotation(stream_type, config, dim);
+ feature_mask |= config.feature_mask;
+
+ // Dual Camera scenarios
+ // all feature masks are disabled for preview and analysis streams for aux session
+ // all required feature masks for aux session preview and analysis streams need
+ // to be enabled explicitly here
+ ///@note When aux camera is of bayer type, keep pp mask as is or we'd run
+ /// into stream mapping problems. YUV sensor is marked as interleaved and has
+ /// preferred mapping setup so we don't see any mapping issues.
+ if (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+ if (((CAM_STREAM_TYPE_ANALYSIS == stream_type) ||
+ (CAM_STREAM_TYPE_PREVIEW == stream_type)) &&
+ (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) &&
+ (m_pCapability->sensor_type.sens_type == CAM_SENSOR_YUV)) {
+ LOGH("Disabling all pp feature masks for aux preview and "
+ "analysis streams");
+ feature_mask = 0;
+ }
+
+ // all feature masks need to be enabled here
+ // enable DCRF feature mask on analysis stream in case of dual camera
+ if (m_bDcrfEnabled && (CAM_STREAM_TYPE_ANALYSIS == stream_type)) {
+ feature_mask |= CAM_QCOM_FEATURE_DCRF;
+ } else {
+ feature_mask &= ~CAM_QCOM_FEATURE_DCRF;
+ }
+ }
+
+ // Preview assisted autofocus needs to be supported for
+ // callback, preview, or video streams
+ cam_color_filter_arrangement_t filter_arrangement;
+ filter_arrangement = m_pCapability->color_arrangement;
+ switch (filter_arrangement) {
+ case CAM_FILTER_ARRANGEMENT_RGGB:
+ case CAM_FILTER_ARRANGEMENT_GRBG:
+ case CAM_FILTER_ARRANGEMENT_GBRG:
+ case CAM_FILTER_ARRANGEMENT_BGGR:
+ if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
+ (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
+ (stream_type == CAM_STREAM_TYPE_VIDEO && getISType() != IS_TYPE_EIS_3_0)) {
+ feature_mask |= CAM_QCOM_FEATURE_PAAF;
+ }
+ break;
+ case CAM_FILTER_ARRANGEMENT_Y:
+ if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
+ feature_mask |= CAM_QCOM_FEATURE_PAAF;
+ LOGH("add PAAF mask to feature_mask for mono device");
+ }
+ break;
+ default:
+ break;
+ }
+
+ // Store stream feature mask
+ setStreamPpMask(stream_type, feature_mask);
+ LOGH("stream type: %d, pp_mask: 0x%llx", stream_type, feature_mask);
+
+ return NO_ERROR;
+}
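+
+/*===========================================================================
+ * ILLUSTRATION : accumulating a post-processing feature mask
+ *
+ * Editor's sketch, not part of the HAL: updatePpFeatureMask() above builds
+ * the per-stream mask by OR-ing in one bit per enabled feature. The bit
+ * values below are hypothetical; the real CAM_QCOM_FEATURE_* masks come from
+ * the interface headers. Guarded with #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+#include <cstdint>
+
+static uint64_t buildMask(bool sharpness, bool effect, bool denoise, bool flip)
+{
+    // Hypothetical bit assignments, for illustration only.
+    const uint64_t kSharpness = 1ULL << 0;
+    const uint64_t kEffect    = 1ULL << 1;
+    const uint64_t kDenoise2D = 1ULL << 2;
+    const uint64_t kFlip      = 1ULL << 3;
+
+    uint64_t mask = 0;
+    if (sharpness) mask |= kSharpness;
+    if (effect)    mask |= kEffect;
+    if (denoise)   mask |= kDenoise2D;
+    if (flip)      mask |= kFlip;
+    return mask;
+}
+#endif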
+
+/*===========================================================================
+ * FUNCTION : setStreamPpMask
+ *
+ * DESCRIPTION: Stores a particular feature mask for a given camera stream
+ *
+ * PARAMETERS :
+ * @stream_type: Camera stream type
+ * @pp_mask : Feature mask
+ *
+ * RETURN : NO_ERROR --success
+ * int32_t type of status
+ *==========================================================================*/
+int32_t QCameraParameters::setStreamPpMask(cam_stream_type_t stream_type,
+ cam_feature_mask_t pp_mask) {
+
+ if(stream_type >= CAM_STREAM_TYPE_MAX) {
+ return BAD_TYPE;
+ }
+
+ mStreamPpMask[stream_type] = pp_mask;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getStreamPpMask
+ *
+ * DESCRIPTION: Retrieves the feature mask for a given camera stream
+ *
+ * PARAMETERS :
+ * @stream_type: Camera stream type
+ * @pp_mask : Feature mask
+ *
+ * RETURN : NO_ERROR --success
+ * int32_t type of status
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamPpMask(cam_stream_type_t stream_type,
+ cam_feature_mask_t &pp_mask) {
+
+ if(stream_type >= CAM_STREAM_TYPE_MAX) {
+ return BAD_TYPE;
+ }
+
+ pp_mask = mStreamPpMask[stream_type];
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : isMultiPassReprocessing
+ *
+ * DESCRIPTION: Read setprop to enable/disable multipass
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : TRUE -- If enabled
+ * FALSE -- disabled
+ *==========================================================================*/
+bool QCameraParameters::isMultiPassReprocessing()
+{
+ char value[PROPERTY_VALUE_MAX];
+ int multpass = 0;
+
+ if (getQuadraCfa()) {
+ return TRUE;
+ }
+
+ property_get("persist.camera.multi_pass", value, "0");
+ multpass = atoi(value);
+
+ return (multpass == 0)? FALSE : TRUE;
+}
+
+/*===========================================================================
+ * FUNCTION : setReprocCount
+ *
+ * DESCRIPTION: Set total reprocessing pass count
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraParameters::setReprocCount()
+{
+ mTotalPPCount = 1; //Default reprocessing Pass count
+
+ if (getManualCaptureMode() >=
+ CAM_MANUAL_CAPTURE_TYPE_3) {
+ LOGD("Additional post processing enabled for manual capture");
+ mTotalPPCount++;
+ }
+
+ if (!isMultiPassReprocessing()) {
+ return;
+ }
+
+ if ((getZoomLevel() != 0)
+ && (getBurstCountForAdvancedCapture()
+ == getNumOfSnapshots())) {
+ LOGD("2 Pass postprocessing enabled");
+ mTotalPPCount++;
+ }
+
+ if (getQuadraCfa()) {
+ mTotalPPCount++;
+ }
+}
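+
+/*===========================================================================
+ * ILLUSTRATION : counting reprocess passes in setReprocCount
+ *
+ * Editor's sketch, not part of the HAL: the total pass count starts at one
+ * and is incremented for each condition that needs an extra pass. The flags
+ * below are plain booleans standing in for the real queries. Guarded with
+ * #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+static int countReprocPasses(bool manualCapture, bool multiPassAllowed,
+        bool zoomBurst, bool quadraCfa)
+{
+    int passes = 1; // default single reprocess pass
+    if (manualCapture) {
+        passes++; // additional post processing for manual capture
+    }
+    if (multiPassAllowed) {
+        if (zoomBurst) {
+            passes++; // two-pass post processing for zoomed advanced capture
+        }
+        if (quadraCfa) {
+            passes++; // extra pass for Quadra CFA
+        }
+    }
+    return passes;
+}
+#endif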
+
+/*===========================================================================
+ * FUNCTION : isUBWCEnabled
+ *
+ * DESCRIPTION: Function to get UBWC hardware support.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : TRUE -- UBWC format supported
+ * FALSE -- UBWC is not supported.
+ *==========================================================================*/
+bool QCameraParameters::isUBWCEnabled()
+{
+#ifdef UBWC_PRESENT
+ char value[PROPERTY_VALUE_MAX];
+ int prop_value = 0;
+ memset(value, 0, sizeof(value));
+ property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
+ prop_value = atoi(value);
+ if (prop_value) {
+ return FALSE;
+ }
+
+ //Disable UBWC if it is YUV sensor.
+ if ((m_pCapability != NULL) &&
+ (m_pCapability->sensor_type.sens_type == CAM_SENSOR_YUV)) {
+ return FALSE;
+ }
+
+ //Disable UBWC if Eztune is enabled
+ // Eztune works on CPP output and cannot understand UBWC buffer.
+ memset(value, 0, sizeof(value));
+ property_get("persist.camera.eztune.enable", value, "0");
+ prop_value = atoi(value);
+ if (prop_value) {
+ return FALSE;
+ }
+ return TRUE;
+#else
+ return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION : isPostProcScaling
+ *
+ * DESCRIPTION: is scaling to be done by CPP?
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : TRUE : If CPP scaling enabled
+ * FALSE : If VFE scaling enabled
+ *==========================================================================*/
+bool QCameraParameters::isPostProcScaling()
+{
+ char value[PROPERTY_VALUE_MAX];
+ bool cpp_scaling = FALSE;
+
+ if (getRecordingHintValue()) {
+ return FALSE;
+ }
+
+ property_get("persist.camera.pp_scaling", value, "0");
+ cpp_scaling = atoi(value) > 0 ? TRUE : FALSE;
+
+ LOGH("Post proc scaling enabled : %d",
+ cpp_scaling);
+ return cpp_scaling;
+}
+
+/*===========================================================================
+ * FUNCTION : isLLNoiseEnabled
+ *
+ * DESCRIPTION: check whether low light noise reduction is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : TRUE : If low light noise enabled
+ * FALSE : If low light noise disabled
+ *==========================================================================*/
+bool QCameraParameters::isLLNoiseEnabled()
+{
+ char value[PROPERTY_VALUE_MAX];
+ bool llnoise = FALSE;
+
+ if (!isWNREnabled()) {
+ return FALSE;
+ }
+
+ property_get("persist.camera.llnoise", value, "0");
+ llnoise = atoi(value) > 0 ? TRUE : FALSE;
+
+ LOGH("Low light noise enabled : %d",
+ llnoise);
+ return llnoise;
+}
+
+/*===========================================================================
+ * FUNCTION : setBufBatchCount
+ *
+ * DESCRIPTION: Function to configure batch buffer
+ *
+ * PARAMETERS : int8_t buf_cnt
+ * Buffer batch count
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraParameters::setBufBatchCount(int8_t buf_cnt)
+{
+ mBufBatchCnt = 0;
+ char value[PROPERTY_VALUE_MAX];
+ int8_t count = 0;
+
+ property_get("persist.camera.batchcount", value, "0");
+ count = atoi(value);
+
+ if ((count == 0) && (buf_cnt <= CAMERA_MIN_BATCH_COUNT)) {
+ LOGH("Buffer batch count = %d", mBufBatchCnt);
+ set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+ return;
+ }
+
+ while((m_pCapability->max_batch_bufs_supported != 0)
+ && (m_pCapability->max_batch_bufs_supported < buf_cnt)) {
+ buf_cnt = buf_cnt / 2;
+ }
+
+ if (count > 0) {
+ mBufBatchCnt = count;
+ LOGH("Buffer batch count = %d", mBufBatchCnt);
+ set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+ return;
+ }
+
+ if (buf_cnt > CAMERA_MIN_BATCH_COUNT) {
+ mBufBatchCnt = buf_cnt;
+ LOGH("Buffer batch count = %d", mBufBatchCnt);
+ set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+ return;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoBatchSize
+ *
+ * DESCRIPTION: Function to set the HAL-to-HAL batch size for video.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraParameters::setVideoBatchSize()
+{
+ char value[PROPERTY_VALUE_MAX];
+ int8_t minBatchcnt = 2; //Batching enabled only if batch size is greater than 2
+ int32_t width = 0, height = 0;
+ mVideoBatchSize = 0;
+
+ if (getBufBatchCount()) {
+ //We don't need HAL to HAL batching if camera batching is enabled.
+ return;
+ }
+
+ getVideoSize(&width, &height);
+ if ((width > 1920) || (height > 1080)) {
+ //Cannot enable batch mode for video size bigger than 1080p
+ return;
+ }
+
+ //Batch size "6" is the recommended and gives optimal power saving.
+ property_get("persist.camera.video.batchsize", value, "0");
+ mVideoBatchSize = atoi(value);
+
+ if (mVideoBatchSize > CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE) {
+ mVideoBatchSize = CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE;
+ } else if (mVideoBatchSize <= minBatchcnt) {
+ //Batching enabled only if batch size is greater than 2.
+ mVideoBatchSize = 0;
+ }
+ LOGD("mVideoBatchSize = %d", mVideoBatchSize);
+ set(KEY_QC_VIDEO_BATCH_SIZE, mVideoBatchSize);
+}
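+
+/*===========================================================================
+ * ILLUSTRATION : clamping the HAL-to-HAL video batch size
+ *
+ * Editor's sketch, not part of the HAL: requested sizes above the consumer
+ * maximum are capped, and sizes at or below the minimum disable batching.
+ * The limits are passed as parameters here instead of the real constants.
+ * Guarded with #if 0 so it never affects the build.
+ *==========================================================================*/
+#if 0
+static int clampBatchSize(int requested, int minCount, int maxCount)
+{
+    if (requested > maxCount) {
+        return maxCount; // cap at the consumer limit
+    }
+    if (requested <= minCount) {
+        return 0; // batching disabled
+    }
+    return requested;
+}
+#endif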
+
+/*===========================================================================
+ * FUNCTION : setCustomParams
+ *
+ * DESCRIPTION: Function to update OEM specific custom parameter
+ *
+ * PARAMETERS : params: Input Parameter object
+ *
+ * RETURN : error value
+ *==========================================================================*/
+int32_t QCameraParameters::setCustomParams(__unused const QCameraParameters& params)
+{
+ int32_t rc = NO_ERROR;
+
+ /* Application specific parameters can be read from "params" to update m_pParamBuf.
+ We can also update internal OEM custom parameters in this function.
+ "CAM_CUSTOM_PARM_EXAMPLE" is used as an example */
+
+ /*Get the pointer of shared buffer for custom parameter*/
+ custom_parm_buffer_t *customParam =
+ (custom_parm_buffer_t *)POINTER_OF_META(CAM_INTF_PARM_CUSTOM, m_pParamBuf);
+
+
+ /*start updating custom parameter values*/
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(customParam, CAM_CUSTOM_PARM_EXAMPLE, 1)) {
+ LOGE("Failed to update CAM_CUSTOM_PARM_DUMMY");
+ return BAD_VALUE;
+ }
+
+ /*set custom parameter values to main parameter buffer. Update isvalid flag*/
+ ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CUSTOM);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : dump
+ *
+ * DESCRIPTION: Composes a string based on current configuration
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : Formatted string
+ *==========================================================================*/
+String8 QCameraParameters::dump()
+{
+ String8 str("\n");
+ char s[128];
+
+ snprintf(s, 128, "Preview Pixel Fmt: %d\n", getPreviewHalPixelFormat());
+ str += s;
+
+ snprintf(s, 128, "ZSL Burst Interval: %d\n", getZSLBurstInterval());
+ str += s;
+
+ snprintf(s, 128, "ZSL Queue Depth: %d\n", getZSLQueueDepth());
+ str += s;
+
+ snprintf(s, 128, "ZSL Back Look Count %d\n", getZSLBackLookCount());
+ str += s;
+
+ snprintf(s, 128, "Max Unmatched Frames In Queue: %d\n",
+ getMaxUnmatchedFramesInQueue());
+ str += s;
+
+ snprintf(s, 128, "Is ZSL Mode: %d\n", isZSLMode());
+ str += s;
+
+ snprintf(s, 128, "Is No Display Mode: %d\n", isNoDisplayMode());
+ str += s;
+
+ snprintf(s, 128, "Is WNR Enabled: %d\n", isWNREnabled());
+ str += s;
+
+ snprintf(s, 128, "isHfrMode: %d\n", isHfrMode());
+ str += s;
+
+ snprintf(s, 128, "getNumOfSnapshots: %d\n", getNumOfSnapshots());
+ str += s;
+
+ snprintf(s, 128, "getNumOfExtraHDRInBufsIfNeeded: %d\n",
+ getNumOfExtraHDRInBufsIfNeeded());
+ str += s;
+
+ snprintf(s, 128, "getNumOfExtraHDROutBufsIfNeeded: %d\n",
+ getNumOfExtraHDROutBufsIfNeeded());
+ str += s;
+
+ snprintf(s, 128, "getRecordingHintValue: %d\n", getRecordingHintValue());
+ str += s;
+
+ snprintf(s, 128, "getJpegQuality: %u\n", getJpegQuality());
+ str += s;
+
+ snprintf(s, 128, "getJpegRotation: %u\n", getJpegRotation());
+ str += s;
+
+ snprintf(s, 128, "isHistogramEnabled: %d\n", isHistogramEnabled());
+ str += s;
+
+ snprintf(s, 128, "isFaceDetectionEnabled: %d\n", isFaceDetectionEnabled());
+ str += s;
+
+ snprintf(s, 128, "isHDREnabled: %d\n", isHDREnabled());
+ str += s;
+
+ snprintf(s, 128, "isAutoHDREnabled: %d\n", isAutoHDREnabled());
+ str += s;
+
+ snprintf(s, 128, "isAVTimerEnabled: %d\n", isAVTimerEnabled());
+ str += s;
+
+ snprintf(s, 128, "getFocusMode: %d\n", getFocusMode());
+ str += s;
+
+ snprintf(s, 128, "isJpegPictureFormat: %d\n", isJpegPictureFormat());
+ str += s;
+
+ snprintf(s, 128, "isNV16PictureFormat: %d\n", isNV16PictureFormat());
+ str += s;
+
+ snprintf(s, 128, "isNV21PictureFormat: %d\n", isNV21PictureFormat());
+ str += s;
+
+ snprintf(s, 128, "isSnapshotFDNeeded: %d\n", isSnapshotFDNeeded());
+ str += s;
+
+ snprintf(s, 128, "isHDR1xFrameEnabled: %d\n", isHDR1xFrameEnabled());
+ str += s;
+
+ snprintf(s, 128, "isYUVFrameInfoNeeded: %d\n", isYUVFrameInfoNeeded());
+ str += s;
+
+ snprintf(s, 128, "isHDR1xExtraBufferNeeded: %d\n",
+ isHDR1xExtraBufferNeeded());
+ str += s;
+
+ snprintf(s, 128, "isHDROutputCropEnabled: %d\n", isHDROutputCropEnabled());
+ str += s;
+
+ snprintf(s, 128, "isPreviewFlipChanged: %d\n", isPreviewFlipChanged());
+ str += s;
+
+ snprintf(s, 128, "isVideoFlipChanged: %d\n", isVideoFlipChanged());
+ str += s;
+
+ snprintf(s, 128, "isSnapshotFlipChanged: %d\n", isSnapshotFlipChanged());
+ str += s;
+
+ snprintf(s, 128, "isHDRThumbnailProcessNeeded: %d\n",
+ isHDRThumbnailProcessNeeded());
+ str += s;
+
+ snprintf(s, 128, "getAutoFlickerMode: %d\n", getAutoFlickerMode());
+ str += s;
+
+ snprintf(s, 128, "getNumOfExtraBuffersForImageProc: %d\n",
+ getNumOfExtraBuffersForImageProc());
+ str += s;
+
+ snprintf(s, 128, "isUbiFocusEnabled: %d\n", isUbiFocusEnabled());
+ str += s;
+
+ snprintf(s, 128, "isChromaFlashEnabled: %d\n", isChromaFlashEnabled());
+ str += s;
+
+ snprintf(s, 128, "isOptiZoomEnabled: %d\n", isOptiZoomEnabled());
+ str += s;
+
+ snprintf(s, 128, "isStillMoreEnabled: %d\n", isStillMoreEnabled());
+ str += s;
+
+ snprintf(s, 128, "getBurstCountForAdvancedCapture: %d\n",
+ getBurstCountForAdvancedCapture());
+ str += s;
+
+ return str;
+}
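+
+/*
+ * Minimal usage sketch (comment only): the formatted string can be routed
+ * through the existing logging macros. "mParameters" stands for a hypothetical
+ * owning object (e.g. the HWI layer holding a QCameraParameters instance).
+ *
+ *   String8 state = mParameters.dump();
+ *   LOGH("%s", state.string());
+ */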
+
+/*===========================================================================
+ * FUNCTION : getNumOfExtraBuffersForVideo
+ *
+ * DESCRIPTION: get number of extra video buffers needed by post processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of extra video buffers needed;
+ * 0 if no extra buffer is needed
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForVideo()
+{
+ uint8_t numOfBufs = 0;
+
+ if (isSeeMoreEnabled() || isHighQualityNoiseReductionMode()) {
+ numOfBufs = 1;
+ }
+
+ return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumOfExtraBuffersForPreview
+ *
+ * DESCRIPTION: get number of extra preview buffers needed by post processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of extra preview buffers needed;
+ * 0 if no extra buffer is needed
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForPreview()
+{
+ uint8_t numOfBufs = 0;
+
+ if ((isSeeMoreEnabled() || isHighQualityNoiseReductionMode())
+ && !isZSLMode() && getRecordingHintValue()) {
+ numOfBufs = 1;
+ }
+
+ return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION : setToneMapMode
+ *
+ * DESCRIPTION: enable or disable tone map
+ *
+ * PARAMETERS :
+ * @enable : 1 to enable tone map; 0 to disable
+ * @initCommit: whether the configuration list needs to be initialized and committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setToneMapMode(uint32_t enable, bool initCommit)
+{
+ int32_t rc = NO_ERROR;
+ LOGH("tone map mode %d ", enable);
+
+ if (initCommit) {
+ if (initBatchUpdate(m_pParamBuf) < 0) {
+ LOGE("Failed to initialize group update table");
+ return FAILED_TRANSACTION;
+ }
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_TONE_MAP_MODE, enable)) {
+ LOGE("Failed to update tone map mode");
+ return BAD_VALUE;
+ }
+
+ if (initCommit) {
+ rc = commitSetBatch();
+ if (rc != NO_ERROR) {
+ LOGE("Failed to commit tone map mode");
+ return rc;
+ }
+ }
+
+ return rc;
+}
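+
+/*
+ * Usage sketch (comment only, as seen from within this class): with
+ * initCommit=true the call is self-contained -- it initializes the batch,
+ * sets CAM_INTF_PARM_TONE_MAP_MODE and commits in one step. With
+ * initCommit=false the caller owns the surrounding batch; error checks are
+ * omitted for brevity.
+ *
+ *   // standalone: enable tone mapping immediately
+ *   if (setToneMapMode(1, true) != NO_ERROR) {
+ *       LOGE("Failed to enable tone map mode");
+ *   }
+ *
+ *   // batched: group it with other parameter updates
+ *   initBatchUpdate(m_pParamBuf);
+ *   setToneMapMode(0, false);
+ *   // ... add further entries ...
+ *   commitSetBatch();
+ */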
+
+/*===========================================================================
+ * FUNCTION : getLongshotStages
+ *
+ * DESCRIPTION: get number of stages for longshot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : number of stages
+ *==========================================================================*/
+uint8_t QCameraParameters::getLongshotStages()
+{
+ uint8_t numStages =
+ isLowMemoryDevice() ? CAMERA_MIN_LONGSHOT_STAGES : CAMERA_DEFAULT_LONGSHOT_STAGES;
+
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.longshot.stages", prop, "0");
+ uint8_t propStages = atoi(prop);
+ if (propStages > 0 && propStages <= CAMERA_DEFAULT_LONGSHOT_STAGES) {
+ numStages = propStages;
+ }
+ return numStages;
+}
+
+/*===========================================================================
+ * FUNCTION : setCDSMode
+ *
+ * DESCRIPTION: set CDS mode
+ *
+ * PARAMETERS :
+ * @cds_mode : cds mode
+ * @initCommit: whether the configuration list needs to be initialized and committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCDSMode(int32_t cds_mode, bool initCommit)
+{
+ if (initCommit) {
+ if (initBatchUpdate(m_pParamBuf) < 0) {
+ LOGE("Failed to initialize group update table");
+ return FAILED_TRANSACTION;
+ }
+ }
+
+ int32_t rc = NO_ERROR;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+ LOGE("Failed to update cds mode");
+ return BAD_VALUE;
+ }
+
+ if (initCommit) {
+ rc = commitSetBatch();
+ if (NO_ERROR != rc) {
+ LOGE("Failed to set cds mode");
+ return rc;
+ }
+ }
+
+ LOGH("cds mode -> %d", cds_mode);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setLowLightCapture
+ *
+ * DESCRIPTION: Enable or disable low light capture based on the persist.camera.llc property
+ *==========================================================================*/
+void QCameraParameters::setLowLightCapture()
+{
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.llc", prop, "0");
+ m_LLCaptureEnabled = (atoi(prop) > 0) ? TRUE : FALSE;
+
+ if (!m_LLCaptureEnabled) {
+ m_LowLightLevel = CAM_LOW_LIGHT_OFF;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : fdModeInVideo
+ *
+ * DESCRIPTION: Query the face detection (FD) mode to use during video recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : FD Mode in Video
+ * 0 : If FD in Video disabled
+ * 1 : If FD in Video enabled for Detection, focus
+ * 2 : If FD in Video enabled only for focus
+ *==========================================================================*/
+uint8_t QCameraParameters::fdModeInVideo()
+{
+ char value[PROPERTY_VALUE_MAX];
+ uint8_t fdvideo = 0;
+
+ property_get("persist.camera.fdvideo", value, "0");
+ fdvideo = (atoi(value) > 0) ? atoi(value) : 0;
+
+ LOGD("FD mode in Video : %d", fdvideo);
+ return fdvideo;
+}
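+
+/*
+ * Interpretation sketch (comment only): a caller of fdModeInVideo() would
+ * typically branch on the three documented values; the cases below are
+ * placeholders, not existing code.
+ *
+ *   switch (fdModeInVideo()) {
+ *       case 0: // FD disabled during video
+ *           break;
+ *       case 1: // FD results used for detection callbacks and focus
+ *           break;
+ *       case 2: // FD results used only to drive focus
+ *           break;
+ *       default:
+ *           break;
+ *   }
+ */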
+
+/*===========================================================================
+ * FUNCTION : setManualCaptureMode
+ *
+ * DESCRIPTION: Function to set Manual capture modes
+ *
+ * PARAMETERS :
+ * @mode : Capture mode configured
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setManualCaptureMode(QCameraManualCaptureModes mode)
+{
+ int32_t rc = NO_ERROR;
+ char value[PROPERTY_VALUE_MAX];
+ int8_t count = 0;
+
+ property_get("persist.camera.manual.capture", value, "0");
+ count = atoi(value);
+
+ if (count) {
+ if (mode == CAM_MANUAL_CAPTURE_TYPE_OFF) {
+ m_ManualCaptureMode = CAM_MANUAL_CAPTURE_TYPE_1;
+ } else {
+ m_ManualCaptureMode = mode;
+ }
+ } else {
+ m_ManualCaptureMode = CAM_MANUAL_CAPTURE_TYPE_OFF;
+ }
+
+ if (m_ManualCaptureMode == CAM_MANUAL_CAPTURE_TYPE_2) {
+ setOfflineRAW(FALSE);
+ } else if (m_ManualCaptureMode >= CAM_MANUAL_CAPTURE_TYPE_3) {
+ setOfflineRAW(TRUE);
+ } else {
+ setOfflineRAW(FALSE);
+ }
+ setReprocCount();
+ LOGH("Manual capture mode - %d", m_ManualCaptureMode);
+ return rc;
+}
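+
+/*
+ * Usage sketch (comment only): requesting CAM_MANUAL_CAPTURE_TYPE_3 or higher
+ * routes the capture through offline RAW reprocessing, while lower types keep
+ * it disabled (see the branches above). The requested mode is honored only
+ * when persist.camera.manual.capture is non-zero; otherwise the mode falls
+ * back to CAM_MANUAL_CAPTURE_TYPE_OFF.
+ *
+ *   if (setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_3) != NO_ERROR) {
+ *       LOGE("Failed to configure manual capture mode");
+ *   }
+ */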
+
+/*===========================================================================
+ * FUNCTION : isReprocScaleEnabled
+ *
+ * DESCRIPTION: Whether reprocess scale is enabled or not
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : TRUE : Reprocess scale is enabled
+ * FALSE : Reprocess scale is not enabled
+ *==========================================================================*/
+bool QCameraParameters::isReprocScaleEnabled()
+{
+ return m_reprocScaleParam.isScaleEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION : isUnderReprocScaling
+ *
+ * DESCRIPTION: Whether image is under reprocess scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : TRUE : Image is under reprocess scaling
+ * FALSE : Image is not under reprocess scaling
+ *==========================================================================*/
+bool QCameraParameters::isUnderReprocScaling()
+{
+ return m_reprocScaleParam.isUnderScaling();
+}
+
+/*===========================================================================
+ * FUNCTION : getPicSizeFromAPK
+ *
+ * DESCRIPTION: Get picture size set from application.
+ *
+ * PARAMETERS :
+ * @width : width set by application
+ * @height : height set by application
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getPicSizeFromAPK(int &width, int &height)
+{
+ return m_reprocScaleParam.getPicSizeFromAPK(width, height);
+}
+
+/*===========================================================================
+ * FUNCTION : setDualLedCalibration
+ *
+ * DESCRIPTION: set dual led calibration
+ *
+ * PARAMETERS :
+ * @params : user setting parameters
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDualLedCalibration(
+ __unused const QCameraParameters& params)
+{
+ char value[PROPERTY_VALUE_MAX];
+ int32_t calibration = 0;
+
+ memset(value, 0, sizeof(value));
+ property_get("persist.camera.dual_led_calib", value, "0");
+ calibration = atoi(value);
+ if (calibration != m_dualLedCalibration) {
+ m_dualLedCalibration = calibration;
+ LOGD("%s:updating calibration=%d m_dualLedCalibration=%d",
+ __func__, calibration, m_dualLedCalibration);
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_DUAL_LED_CALIBRATION,
+ m_dualLedCalibration)) {
+ LOGE("%s:Failed to update dual led calibration param", __func__);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setInstantAEC
+ *
+ * DESCRIPTION: set instant AEC value to backend
+ *
+ * PARAMETERS :
+ * @value : instant aec enabled or not
+ * 0 - disable
+ * 1 - Enable and set aggressive AEC algo to the backend
+ * 2 - Enable and set fast AEC algo to the backend
+ * @initCommit: whether the configuration list needs to be initialized and committed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(uint8_t value, bool initCommit)
+{
+ if (initCommit) {
+ if (initBatchUpdate(m_pParamBuf) < 0) {
+ LOGE("Failed to initialize group update table");
+ return FAILED_TRANSACTION;
+ }
+ }
+
+ int32_t rc = NO_ERROR;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_INSTANT_AEC, value)) {
+ LOGE("Failed to instant aec value");
+ return BAD_VALUE;
+ }
+
+ // set the new value
+ char val[8];
+ snprintf(val, sizeof(val), "%d", value);
+ updateParamEntry(KEY_QC_INSTANT_AEC, val);
+
+ if (initCommit) {
+ rc = commitSetBatch();
+ if (NO_ERROR != rc) {
+ LOGE("Failed to instant aec value");
+ return rc;
+ }
+ }
+
+ LOGD(" Instant AEC value set to backend %d", value);
+ m_bInstantAEC = value;
+ return rc;
+}
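+
+/*
+ * Usage sketch (comment only): enabling the aggressive instant AEC algorithm
+ * as a standalone commit; the value follows the mapping documented above and
+ * no new APIs are assumed.
+ *
+ *   if (setInstantAEC(1, true) != NO_ERROR) {   // 1 = aggressive AEC
+ *       LOGE("Failed to enable instant AEC");
+ *   }
+ */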
+
+/*===========================================================================
+ * FUNCTION : setAdvancedCaptureMode
+ *
+ * DESCRIPTION: set advanced capture mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAdvancedCaptureMode()
+{
+ uint8_t value = isAdvCamFeaturesEnabled();
+ LOGD("updating advanced capture mode value to %d",value);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+ CAM_INTF_PARM_ADV_CAPTURE_MODE, value)) {
+ LOGE("Failed to set advanced capture mode param");
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getAnalysisInfo
+ *
+ * DESCRIPTION: Get the Analysis information based on
+ * current mode and feature mask
+ *
+ * PARAMETERS :
+ * @fdVideoEnabled : Whether fdVideo enabled currently
+ * @hal3 : Whether the caller is HAL3 (true) or HAL1 (false)
+ * @featureMask : Feature mask
+ * @pAnalysisInfo : Analysis info to be filled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getAnalysisInfo(
+ bool fdVideoEnabled,
+ bool hal3,
+ cam_feature_mask_t featureMask,
+ cam_analysis_info_t *pAnalysisInfo)
+{
+ return mCommon.getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraParameters.h b/camera/QCamera2/HAL/QCameraParameters.h
new file mode 100644
index 0000000..f730c14
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParameters.h
@@ -0,0 +1,1234 @@
+/*
+** Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+** Not a Contribution. Apache license notifications and license are
+** retained for attribution purposes only.
+**
+** Copyright 2008, The Android Open Source Project
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <camera/CameraParameters.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraMem.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraThermalAdapter.h"
+#include "QCameraCommon.h"
+
+extern "C" {
+#include "mm_jpeg_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0"
+
+#define FOCAL_LENGTH_DECIMAL_PRECISION 100
+
+#define CAMERA_MIN_BATCH_COUNT 4
+
+#define QCAMERA_MAX_EXP_TIME_LEVEL1 100
+#define QCAMERA_MAX_EXP_TIME_LEVEL2 500
+#define QCAMERA_MAX_EXP_TIME_LEVEL3 1000
+#define QCAMERA_MAX_EXP_TIME_LEVEL4 10000
+
+class QCameraParameters: private CameraParameters
+{
+
+private:
+
+ class QCameraReprocScaleParam{
+ public:
+
+ QCameraReprocScaleParam();
+ ~QCameraReprocScaleParam();
+
+ void setScaleEnable(bool enabled);
+ int32_t setScaleSizeTbl(size_t scale_cnt,
+ cam_dimension_t *scale_tbl, size_t org_cnt,
+ cam_dimension_t *org_tbl);
+ int32_t setValidatePicSize(int &width, int &height);
+
+ bool isScaleEnabled();
+ bool isUnderScaling();
+
+ size_t getScaleSizeTblCnt();
+ cam_dimension_t *getScaledSizeTbl();
+ size_t getTotalSizeTblCnt();
+ cam_dimension_t *getTotalSizeTbl();
+ int32_t getPicSizeFromAPK(int &width, int &height);
+ int32_t getPicSizeSetted(int &width, int &height);
+
+ private:
+ bool isScalePicSize(int width, int height);
+ bool isValidatePicSize(int width, int height);
+ int32_t setSensorSupportedPicSize();
+ size_t checkScaleSizeTable(size_t scale_cnt, cam_dimension_t *scale_tbl,
+ size_t org_cnt, cam_dimension_t *org_tbl);
+
+ bool mScaleEnabled;
+ bool mIsUnderScaling; // whether scaling is currently in progress
+
+ // count of picture sizes that need a scale operation
+ size_t mNeedScaleCnt;
+ cam_dimension_t mNeedScaledSizeTbl[MAX_SCALE_SIZES_CNT];
+
+ // sensor supported size cnt and table
+ size_t mSensorSizeTblCnt;
+ cam_dimension_t *mSensorSizeTbl;
+
+ // Total size cnt (sensor supported + need scale cnt)
+ size_t mTotalSizeTblCnt;
+ cam_dimension_t mTotalSizeTbl[MAX_SIZES_CNT];
+
+ cam_dimension_t mPicSizeFromAPK; // dimension that the APK expects
+ cam_dimension_t mPicSizeSetted; // dimension used to configure the VFE
+ };
+
+ // Supported PREVIEW/RECORDING SIZES IN HIGH FRAME RATE recording, sizes in pixels.
+ // Example value: "800x480,432x320". Read only.
+ static const char KEY_QC_SUPPORTED_HFR_SIZES[];
+ // The mode of preview frame rate.
+ // Example value: "frame-rate-auto, frame-rate-fixed".
+ static const char KEY_QC_PREVIEW_FRAME_RATE_MODE[];
+ static const char KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+ static const char KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[];
+ static const char KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[];
+ static const char KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+ // Supported live snapshot sizes
+ static const char KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[];
+
+ // Supported Raw formats
+ static const char KEY_QC_SUPPORTED_RAW_FORMATS[];
+ static const char KEY_QC_RAW_FORMAT[];
+
+ //Touch Af/AEC settings.
+ static const char KEY_QC_TOUCH_AF_AEC[];
+ static const char KEY_QC_SUPPORTED_TOUCH_AF_AEC[];
+ //Touch Index for AEC.
+ static const char KEY_QC_TOUCH_INDEX_AEC[];
+ //Touch Index for AF.
+ static const char KEY_QC_TOUCH_INDEX_AF[];
+ // Current auto scene detection mode.
+ // Example value: "off" or "on" constants. Read/write.
+ static const char KEY_QC_SCENE_DETECT[];
+ // Supported auto scene detection settings.
+ // Example value: "off,on". Read only.
+ static const char KEY_QC_SUPPORTED_SCENE_DETECT[];
+ static const char KEY_QC_SELECTABLE_ZONE_AF[];
+
+ static const char KEY_QC_ISO_MODE[];
+ static const char KEY_QC_CONTINUOUS_ISO[];
+ static const char KEY_QC_MIN_ISO[];
+ static const char KEY_QC_MAX_ISO[];
+ static const char KEY_QC_SUPPORTED_ISO_MODES[];
+ static const char KEY_QC_EXPOSURE_TIME[];
+ static const char KEY_QC_MIN_EXPOSURE_TIME[];
+ static const char KEY_QC_MAX_EXPOSURE_TIME[];
+ static const char KEY_QC_LENSSHADE[] ;
+ static const char KEY_QC_SUPPORTED_LENSSHADE_MODES[] ;
+ static const char KEY_QC_AUTO_EXPOSURE[];
+ static const char KEY_QC_SUPPORTED_AUTO_EXPOSURE[];
+
+ static const char KEY_QC_GPS_LATITUDE_REF[];
+ static const char KEY_QC_GPS_LONGITUDE_REF[];
+ static const char KEY_QC_GPS_ALTITUDE_REF[];
+ static const char KEY_QC_GPS_STATUS[];
+ static const char KEY_QC_MEMORY_COLOR_ENHANCEMENT[];
+ static const char KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+ static const char KEY_QC_DIS[];
+ static const char KEY_QC_OIS[];
+ static const char KEY_QC_SUPPORTED_DIS_MODES[];
+ static const char KEY_QC_SUPPORTED_OIS_MODES[];
+
+ static const char KEY_QC_ZSL[];
+ static const char KEY_QC_SUPPORTED_ZSL_MODES[];
+ static const char KEY_QC_ZSL_BURST_INTERVAL[];
+ static const char KEY_QC_ZSL_BURST_LOOKBACK[];
+ static const char KEY_QC_ZSL_QUEUE_DEPTH[];
+
+ static const char KEY_QC_CAMERA_MODE[];
+ static const char KEY_QC_ORIENTATION[];
+
+ static const char KEY_QC_VIDEO_HIGH_FRAME_RATE[];
+ static const char KEY_QC_VIDEO_HIGH_SPEED_RECORDING[];
+ static const char KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+ static const char KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[];
+ static const char KEY_QC_SUPPORTED_HDR_IMAGING_MODES[];
+ static const char KEY_QC_AE_BRACKET_HDR[];
+ static const char KEY_QC_SUPPORTED_AE_BRACKET_MODES[];
+ static const char KEY_QC_CAPTURE_BURST_EXPOSURE[];
+ static const char KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[];
+ static const char KEY_QC_NUM_RETRO_BURST_PER_SHUTTER[];
+ static const char KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD[];
+ static const char KEY_QC_SNAPSHOT_BURST_NUM[];
+ static const char KEY_QC_NO_DISPLAY_MODE[];
+ static const char KEY_QC_RAW_PICUTRE_SIZE[];
+ static const char KEY_QC_TINTLESS_ENABLE[];
+ static const char KEY_QC_SCENE_SELECTION[];
+ static const char KEY_QC_CDS_MODE[];
+ static const char KEY_QC_VIDEO_CDS_MODE[];
+ static const char KEY_QC_SUPPORTED_CDS_MODES[];
+ static const char KEY_QC_SUPPORTED_VIDEO_CDS_MODES[];
+ static const char KEY_QC_TNR_MODE[];
+ static const char KEY_QC_VIDEO_TNR_MODE[];
+ static const char KEY_QC_SUPPORTED_TNR_MODES[];
+ static const char KEY_QC_SUPPORTED_VIDEO_TNR_MODES[];
+
+ static const char KEY_INTERNAL_PERVIEW_RESTART[];
+ static const char KEY_QC_WB_MANUAL_CCT[];
+ static const char KEY_QC_MIN_WB_CCT[];
+ static const char KEY_QC_MAX_WB_CCT[];
+ static const char KEY_QC_MANUAL_WB_GAINS[];
+ static const char KEY_QC_MIN_WB_GAIN[];
+ static const char KEY_QC_MAX_WB_GAIN[];
+ static const char WHITE_BALANCE_MANUAL[];
+ static const char FOCUS_MODE_MANUAL_POSITION[];
+ static const char KEY_QC_LONG_SHOT[];
+ static const char KEY_QC_INITIAL_EXPOSURE_INDEX[];
+ static const char KEY_QC_INSTANT_AEC[];
+ static const char KEY_QC_INSTANT_CAPTURE[];
+ static const char KEY_QC_INSTANT_AEC_SUPPORTED_MODES[];
+ static const char KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[];
+
+ static const char KEY_QC_MANUAL_FOCUS_POSITION[];
+ static const char KEY_QC_MANUAL_FOCUS_POS_TYPE[];
+ static const char KEY_QC_MIN_FOCUS_POS_INDEX[];
+ static const char KEY_QC_MAX_FOCUS_POS_INDEX[];
+ static const char KEY_QC_MIN_FOCUS_POS_DAC[];
+ static const char KEY_QC_MAX_FOCUS_POS_DAC[];
+ static const char KEY_QC_MIN_FOCUS_POS_RATIO[];
+ static const char KEY_QC_MAX_FOCUS_POS_RATIO[];
+ static const char KEY_QC_MIN_FOCUS_POS_DIOPTER[];
+ static const char KEY_QC_MAX_FOCUS_POS_DIOPTER[];
+ static const char KEY_QC_FOCUS_POSITION_SCALE[];
+ static const char KEY_QC_FOCUS_POSITION_DIOPTER[];
+
+ static const char KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES[];
+ static const char KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES[];
+ static const char KEY_QC_SUPPORTED_MANUAL_WB_MODES[];
+ static const char KEY_QC_FOCUS_SCALE_MODE[];
+ static const char KEY_QC_FOCUS_DIOPTER_MODE[];
+ static const char KEY_QC_ISO_PRIORITY[];
+ static const char KEY_QC_EXP_TIME_PRIORITY[];
+ static const char KEY_QC_USER_SETTING[];
+ static const char KEY_QC_WB_CCT_MODE[];
+ static const char KEY_QC_WB_GAIN_MODE[];
+ static const char KEY_QC_MANUAL_WB_TYPE[];
+ static const char KEY_QC_MANUAL_WB_VALUE[];
+ static const char KEY_QC_CURRENT_EXPOSURE_TIME[];
+ static const char KEY_QC_CURRENT_ISO[];
+ static const char KEY_QC_CACHE_VIDEO_BUFFERS[];
+
+ // DENOISE
+ static const char KEY_QC_DENOISE[];
+ static const char KEY_QC_SUPPORTED_DENOISE[];
+
+ //Selectable zone AF.
+ static const char KEY_QC_FOCUS_ALGO[];
+ static const char KEY_QC_SUPPORTED_FOCUS_ALGOS[];
+
+ //Face Detection
+ static const char KEY_QC_FACE_DETECTION[];
+ static const char KEY_QC_SUPPORTED_FACE_DETECTION[];
+
+ //Face Recognition
+ static const char KEY_QC_FACE_RECOGNITION[];
+ static const char KEY_QC_SUPPORTED_FACE_RECOGNITION[];
+
+ //Indicates number of faces requested by the application.
+ //This value will be rejected if the requested number of faces
+ //is greater than what the hardware supports.
+ //Write only.
+ static const char KEY_QC_MAX_NUM_REQUESTED_FACES[];
+
+ //preview flip
+ static const char KEY_QC_PREVIEW_FLIP[];
+ //video flip
+ static const char KEY_QC_VIDEO_FLIP[];
+ //snapshot picture flip
+ static const char KEY_QC_SNAPSHOT_PICTURE_FLIP[];
+
+ static const char KEY_QC_SUPPORTED_FLIP_MODES[];
+
+ //Face Detection, Facial processing requirement
+ static const char KEY_QC_SNAPSHOT_FD_DATA[];
+
+ //Auto HDR enable
+ static const char KEY_QC_AUTO_HDR_ENABLE[];
+ // video rotation
+ static const char KEY_QC_VIDEO_ROTATION[];
+ static const char KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES[];
+
+ //Redeye Reduction
+ static const char KEY_QC_REDEYE_REDUCTION[];
+ static const char KEY_QC_SUPPORTED_REDEYE_REDUCTION[];
+ static const char EFFECT_EMBOSS[];
+ static const char EFFECT_SKETCH[];
+ static const char EFFECT_NEON[];
+ static const char EFFECT_BEAUTY[];
+
+ //AF Bracketing
+ static const char KEY_QC_AF_BRACKET[];
+ static const char KEY_QC_SUPPORTED_AF_BRACKET_MODES[];
+
+ //Refocus
+ static const char KEY_QC_RE_FOCUS[];
+ static const char KEY_QC_SUPPORTED_RE_FOCUS_MODES[];
+
+ //Chroma Flash
+ static const char KEY_QC_CHROMA_FLASH[];
+ static const char KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[];
+
+ //Opti Zoom
+ static const char KEY_QC_OPTI_ZOOM[];
+ static const char KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[];
+
+ // Auto HDR supported
+ static const char KEY_QC_AUTO_HDR_SUPPORTED[];
+
+ // HDR modes
+ static const char KEY_QC_HDR_MODE[];
+ static const char KEY_QC_SUPPORTED_KEY_QC_HDR_MODES[];
+
+ //True Portrait
+ static const char KEY_QC_TRUE_PORTRAIT[];
+ static const char KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[];
+
+ //See more
+ static const char KEY_QC_SEE_MORE[];
+ static const char KEY_QC_SUPPORTED_SEE_MORE_MODES[];
+
+ //Still more
+ static const char KEY_QC_STILL_MORE[];
+ static const char KEY_QC_SUPPORTED_STILL_MORE_MODES[];
+
+ //Noise reduction mode
+ static const char KEY_QC_NOISE_REDUCTION_MODE[];
+ static const char KEY_QC_NOISE_REDUCTION_MODE_VALUES[];
+
+ //Longshot
+ static const char KEY_QC_LONGSHOT_SUPPORTED[];
+
+ //ZSL+HDR
+ static const char KEY_QC_ZSL_HDR_SUPPORTED[];
+
+ // Values for Touch AF/AEC
+ static const char TOUCH_AF_AEC_OFF[];
+ static const char TOUCH_AF_AEC_ON[];
+
+ // Values for Scene mode
+ static const char SCENE_MODE_ASD[];
+ static const char SCENE_MODE_BACKLIGHT[];
+ static const char SCENE_MODE_FLOWERS[];
+ static const char SCENE_MODE_AR[];
+ static const char SCENE_MODE_HDR[];
+ static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+ static const char PIXEL_FORMAT_YV12[]; // YV12
+ static const char PIXEL_FORMAT_NV12[]; //NV12
+ static const char QC_PIXEL_FORMAT_NV12_VENUS[]; //NV12 VENUS
+
+ // Values for raw picture format
+ static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[];
+ static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[];
+ static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[];
+ static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB[];
+ static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR[];
+
+ // ISO values
+ static const char ISO_AUTO[];
+ static const char ISO_HJR[];
+ static const char ISO_100[];
+ static const char ISO_200[];
+ static const char ISO_400[];
+ static const char ISO_800[];
+ static const char ISO_1600[];
+ static const char ISO_3200[];
+ static const char ISO_MANUAL[];
+
+ // Values for auto exposure settings.
+ static const char AUTO_EXPOSURE_FRAME_AVG[];
+ static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+ static const char AUTO_EXPOSURE_SPOT_METERING[];
+ static const char AUTO_EXPOSURE_SMART_METERING[];
+ static const char AUTO_EXPOSURE_USER_METERING[];
+ static const char AUTO_EXPOSURE_SPOT_METERING_ADV[];
+ static const char AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[];
+
+ // Values for instant AEC modes
+ static const char KEY_QC_INSTANT_AEC_DISABLE[];
+ static const char KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[];
+ static const char KEY_QC_INSTANT_AEC_FAST_AEC[];
+
+ // Values for instant capture modes
+ static const char KEY_QC_INSTANT_CAPTURE_DISABLE[];
+ static const char KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[];
+ static const char KEY_QC_INSTANT_CAPTURE_FAST_AEC[];
+
+ static const char KEY_QC_SHARPNESS[];
+ static const char KEY_QC_MIN_SHARPNESS[];
+ static const char KEY_QC_MAX_SHARPNESS[];
+ static const char KEY_QC_SHARPNESS_STEP[];
+ static const char KEY_QC_CONTRAST[];
+ static const char KEY_QC_MIN_CONTRAST[];
+ static const char KEY_QC_MAX_CONTRAST[];
+ static const char KEY_QC_CONTRAST_STEP[];
+ static const char KEY_QC_SATURATION[];
+ static const char KEY_QC_MIN_SATURATION[];
+ static const char KEY_QC_MAX_SATURATION[];
+ static const char KEY_QC_SATURATION_STEP[];
+ static const char KEY_QC_BRIGHTNESS[];
+ static const char KEY_QC_MIN_BRIGHTNESS[];
+ static const char KEY_QC_MAX_BRIGHTNESS[];
+ static const char KEY_QC_BRIGHTNESS_STEP[];
+ static const char KEY_QC_SCE_FACTOR[];
+ static const char KEY_QC_MIN_SCE_FACTOR[];
+ static const char KEY_QC_MAX_SCE_FACTOR[];
+ static const char KEY_QC_SCE_FACTOR_STEP[];
+
+ static const char KEY_QC_HISTOGRAM[] ;
+ static const char KEY_QC_SUPPORTED_HISTOGRAM_MODES[] ;
+ static const char KEY_QC_SUPPORTED_HDR_NEED_1X[];
+ static const char KEY_QC_HDR_NEED_1X[];
+ static const char KEY_QC_VIDEO_HDR[];
+ static const char KEY_QC_VT_ENABLE[];
+ static const char KEY_QC_SUPPORTED_VIDEO_HDR_MODES[];
+ static const char KEY_QC_SENSOR_HDR[];
+ static const char KEY_QC_SUPPORTED_SENSOR_HDR_MODES[];
+ static const char KEY_QC_RDI_MODE[];
+ static const char KEY_QC_SUPPORTED_RDI_MODES[];
+ static const char KEY_QC_SECURE_MODE[];
+ static const char KEY_QC_SUPPORTED_SECURE_MODES[];
+
+ // Values for SKIN TONE ENHANCEMENT
+ static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+ static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+ // Values for Denoise
+ static const char DENOISE_OFF[] ;
+ static const char DENOISE_ON[] ;
+
+ // Values for auto exposure settings.
+ static const char FOCUS_ALGO_AUTO[];
+ static const char FOCUS_ALGO_SPOT_METERING[];
+ static const char FOCUS_ALGO_CENTER_WEIGHTED[];
+ static const char FOCUS_ALGO_FRAME_AVERAGE[];
+
+ // Values for AE Bracketing settings.
+ static const char AE_BRACKET_OFF[];
+ static const char AE_BRACKET[];
+
+ // Values for AF Bracketing settings.
+ static const char AF_BRACKET_OFF[];
+ static const char AF_BRACKET_ON[];
+
+ // Values for Refocus settings.
+ static const char RE_FOCUS_OFF[];
+ static const char RE_FOCUS_ON[];
+
+ // Values for Chroma Flash settings.
+ static const char CHROMA_FLASH_OFF[];
+ static const char CHROMA_FLASH_ON[];
+
+ // Values for Opti Zoom settings.
+ static const char OPTI_ZOOM_OFF[];
+ static const char OPTI_ZOOM_ON[];
+
+ // Values for Still More settings.
+ static const char STILL_MORE_OFF[];
+ static const char STILL_MORE_ON[];
+
+ // Values for HDR mode settings.
+ static const char HDR_MODE_SENSOR[];
+ static const char HDR_MODE_MULTI_FRAME[];
+
+ // Values for True Portrait settings.
+ static const char TRUE_PORTRAIT_OFF[];
+ static const char TRUE_PORTRAIT_ON[];
+
+ // Values for HFR settings.
+ static const char VIDEO_HFR_OFF[];
+ static const char VIDEO_HFR_2X[];
+ static const char VIDEO_HFR_3X[];
+ static const char VIDEO_HFR_4X[];
+ static const char VIDEO_HFR_5X[];
+ static const char VIDEO_HFR_6X[];
+ static const char VIDEO_HFR_7X[];
+ static const char VIDEO_HFR_8X[];
+ static const char VIDEO_HFR_9X[];
+
+ // Values for feature on/off settings.
+ static const char VALUE_OFF[];
+ static const char VALUE_ON[];
+
+ // Values for feature enable/disable settings.
+ static const char VALUE_ENABLE[];
+ static const char VALUE_DISABLE[];
+
+ // Values for feature true/false settings.
+ static const char VALUE_FALSE[];
+ static const char VALUE_TRUE[];
+
+ //Values for flip settings
+ static const char FLIP_MODE_OFF[];
+ static const char FLIP_MODE_V[];
+ static const char FLIP_MODE_H[];
+ static const char FLIP_MODE_VH[];
+
+ //Values for CDS Mode
+ static const char CDS_MODE_OFF[];
+ static const char CDS_MODE_ON[];
+ static const char CDS_MODE_AUTO[];
+
+ static const char VALUE_FAST[];
+ static const char VALUE_HIGH_QUALITY[];
+
+ static const char KEY_SELECTED_AUTO_SCENE[];
+
+ // Values for Video rotation
+ static const char VIDEO_ROTATION_0[];
+ static const char VIDEO_ROTATION_90[];
+ static const char VIDEO_ROTATION_180[];
+ static const char VIDEO_ROTATION_270[];
+
+#ifdef TARGET_TS_MAKEUP
+ static const char KEY_TS_MAKEUP[];
+ static const char KEY_TS_MAKEUP_WHITEN[];
+ static const char KEY_TS_MAKEUP_CLEAN[];
+#endif
+ //param key for HFR batch size
+ static const char KEY_QC_VIDEO_BATCH_SIZE[];
+ enum {
+ CAMERA_ORIENTATION_UNKNOWN = 0,
+ CAMERA_ORIENTATION_PORTRAIT = 1,
+ CAMERA_ORIENTATION_LANDSCAPE = 2,
+ };
+
+ template <typename valueType> struct QCameraMap {
+ const char *const desc;
+ valueType val;
+ };
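+
+ /*
+ * Illustrative sketch (comment only): the QCameraMap tables declared later in
+ * this class pair a parameter string with its backend enum value, so a lookup
+ * is a linear scan over the table. The helper name lookupByName is
+ * hypothetical; only the QCameraMap layout above is assumed.
+ *
+ * template <typename valueType>
+ * static int lookupByName(const QCameraMap<valueType> *map, size_t len,
+ *         const char *name, valueType &out) {
+ *     for (size_t i = 0; i < len; i++) {
+ *         if (!strcmp(map[i].desc, name)) {
+ *             out = map[i].val;
+ *             return 0;
+ *         }
+ *     }
+ *     return -1; // not found
+ * }
+ */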
+
+public:
+ QCameraParameters();
+ QCameraParameters(const String8 &params);
+ ~QCameraParameters();
+
+ int32_t allocate();
+ int32_t init(cam_capability_t *,
+ mm_camera_vtbl_t *,
+ QCameraAdjustFPS *);
+ void deinit();
+ int32_t initDefaultParameters();
+ int32_t updateParameters(const String8& params, bool &needRestart);
+ int32_t commitParameters();
+
+ char* getParameters();
+ void getPreviewFpsRange(int *min_fps, int *max_fps) const {
+ CameraParameters::getPreviewFpsRange(min_fps, max_fps);
+ }
+#ifdef TARGET_TS_MAKEUP
+ bool getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const;
+#endif
+
+ int getPreviewHalPixelFormat();
+ int32_t getStreamRotation(cam_stream_type_t streamType,
+ cam_pp_feature_config_t &featureConfig,
+ cam_dimension_t &dim);
+ int32_t getStreamFormat(cam_stream_type_t streamType,
+ cam_format_t &format);
+ int32_t getStreamDimension(cam_stream_type_t streamType,
+ cam_dimension_t &dim);
+ void getThumbnailSize(int *width, int *height) const;
+
+
+ uint8_t getZSLBurstInterval();
+ uint8_t getZSLQueueDepth();
+ uint8_t getZSLBackLookCount();
+ uint8_t getMaxUnmatchedFramesInQueue();
+ bool isZSLMode() {return m_bZslMode;};
+ bool isRdiMode() {return m_bRdiMode;};
+ bool isSecureMode() {return m_bSecureMode;};
+ bool isNoDisplayMode() {return m_bNoDisplayMode;};
+ bool isWNREnabled() {return m_bWNROn;};
+ bool isTNRSnapshotEnabled() {return m_bTNRSnapshotOn;};
+ int32_t getCDSMode() {return mCds_mode;};
+ bool isLTMForSeeMoreEnabled() {return m_bLtmForSeeMoreEnabled;};
+ bool isHfrMode() {return m_bHfrMode;};
+ void getHfrFps(cam_fps_range_t &pFpsRange) { pFpsRange = m_hfrFpsRange;};
+ uint8_t getNumOfSnapshots();
+ uint8_t getNumOfRetroSnapshots();
+ uint8_t getNumOfExtraHDRInBufsIfNeeded();
+ uint8_t getNumOfExtraHDROutBufsIfNeeded();
+
+ bool getRecordingHintValue() {return m_bRecordingHint;}; // return local copy of video hint
+ uint32_t getJpegQuality();
+ uint32_t getRotation();
+ uint32_t getDeviceRotation();
+ uint32_t getJpegExifRotation();
+ bool useJpegExifRotation();
+ int32_t getEffectValue();
+ bool isInstantAECEnabled() {return m_bInstantAEC;};
+ bool isInstantCaptureEnabled() {return m_bInstantCapture;};
+ uint8_t getAecFrameBoundValue() {return mAecFrameBound;};
+ uint8_t getAecSkipDisplayFrameBound() {return mAecSkipDisplayFrameBound;};
+
+ int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
+ int32_t getExifFocalLength(rat_t *focalLenght);
+ uint16_t getExifIsoSpeed();
+ int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count);
+ int32_t getExifLatitude(rat_t *latitude, char *latRef);
+ int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+ int32_t getExifAltitude(rat_t *altitude, char *altRef);
+ int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp);
+ bool isVideoBuffersCached();
+ int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+ bool isAEBracketEnabled();
+ int32_t setAEBracketing();
+ bool isFpsDebugEnabled() {return m_bDebugFps;};
+ bool isHistogramEnabled() {return m_bHistogramEnabled;};
+ bool isSceneSelectionEnabled() {return m_bSceneSelection;};
+ int32_t setSelectedScene(cam_scene_mode_type scene);
+ cam_scene_mode_type getSelectedScene();
+ bool isFaceDetectionEnabled() {return ((m_nFaceProcMask &
+ (CAM_FACE_PROCESS_MASK_DETECTION | CAM_FACE_PROCESS_MASK_FOCUS)) != 0);};
+ int32_t setFaceDetectionOption(bool enabled);
+ int32_t setHistogram(bool enabled);
+ int32_t setFaceDetection(bool enabled, bool initCommit);
+ int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+ qcamera_thermal_mode getThermalMode() {return m_ThermalMode;};
+ int32_t updateRecordingHintValue(int32_t value);
+ int32_t setHDRAEBracket(cam_exp_bracketing_t hdrBracket);
+ bool isHDREnabled();
+ bool isAutoHDREnabled();
+ int32_t stopAEBracket();
+ int32_t updateRAW(cam_dimension_t max_dim);
+ bool isDISEnabled();
+ cam_is_type_t getISType();
+ cam_is_type_t getPreviewISType();
+ uint8_t getMobicatMask();
+
+ cam_focus_mode_type getFocusMode() const {return mFocusMode;};
+ int32_t setNumOfSnapshot();
+ int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+ bool isJpegPictureFormat() {return (mPictureFormat == CAM_FORMAT_JPEG);};
+ bool isNV16PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_422_NV16);};
+ bool isNV21PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_420_NV21);};
+ cam_denoise_process_type_t getDenoiseProcessPlate(cam_intf_parm_type_t type);
+ int32_t getMaxPicSize(cam_dimension_t &dim) { dim = m_maxPicSize; return NO_ERROR; };
+ int getFlipMode(cam_stream_type_t streamType);
+ bool isSnapshotFDNeeded();
+
+ bool isHDR1xFrameEnabled() {return m_bHDR1xFrameEnabled;}
+ bool isSupportedSensorHdrSize(const QCameraParameters& params);
+ bool isYUVFrameInfoNeeded();
+ const char*getFrameFmtString(cam_format_t fmt);
+ bool isHDR1xExtraBufferNeeded() {return m_bHDR1xExtraBufferNeeded;}
+ bool isHDROutputCropEnabled() {return m_bHDROutputCropEnabled;}
+
+ bool isPreviewFlipChanged() { return m_bPreviewFlipChanged; };
+ bool isVideoFlipChanged() { return m_bVideoFlipChanged; };
+ bool isSnapshotFlipChanged() { return m_bSnapshotFlipChanged; };
+ void setHDRSceneEnable(bool bflag);
+ int32_t updateAWBParams(cam_awb_params_t &awb_params);
+
+ const char *getASDStateString(cam_auto_scene_t scene);
+ bool isHDRThumbnailProcessNeeded() { return m_bHDRThumbnailProcessNeeded; };
+ void setMinPpMask(cam_feature_mask_t min_pp_mask) { m_nMinRequiredPpMask = min_pp_mask; };
+ bool setStreamConfigure(bool isCapture, bool previewAsPostview, bool resetConfig);
+ int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId, int32_t device_rotation);
+ uint8_t getNumOfExtraBuffersForImageProc();
+ uint8_t getNumOfExtraBuffersForVideo();
+ uint8_t getNumOfExtraBuffersForPreview();
+ uint32_t getExifBufIndex(uint32_t captureIndex);
+ bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
+ inline bool isUbiFocusEnabled() {return m_bAFBracketingOn && !m_bReFocusOn;};
+ inline bool isChromaFlashEnabled() {return m_bChromaFlashOn;};
+ inline bool isHighQualityNoiseReductionMode() {return m_bHighQualityNoiseReductionMode;};
+ inline bool isTruePortraitEnabled() {return m_bTruePortraitOn;};
+ inline size_t getTPMaxMetaSize() {
+ return m_pCapability->true_portrait_settings_need.meta_max_size;};
+ inline bool isSeeMoreEnabled() {return m_bSeeMoreOn;};
+ inline bool isStillMoreEnabled() {return m_bStillMoreOn;};
+ bool isOptiZoomEnabled();
+
+ int32_t commitAFBracket(cam_af_bracketing_t afBracket);
+ int32_t set3ALock(bool lock3A);
+ int32_t setAndCommitZoom(int zoom_level);
+ uint8_t getBurstCountForAdvancedCapture();
+ uint32_t getNumberInBufsForSingleShot();
+ uint32_t getNumberOutBufsForSingleShot();
+ int32_t setLongshotEnable(bool enable);
+ String8 dump();
+ inline bool isUbiRefocus() {return m_bReFocusOn &&
+ (m_pCapability->refocus_af_bracketing_need.output_count > 1);};
+ inline uint32_t getRefocusMaxMetaSize() {
+ return m_pCapability->refocus_af_bracketing_need.meta_max_size;};
+ inline uint8_t getRefocusOutputCount() {
+ return m_pCapability->refocus_af_bracketing_need.output_count;};
+ inline bool generateThumbFromMain() {return isUbiFocusEnabled() ||
+ isChromaFlashEnabled() || isOptiZoomEnabled() || isUbiRefocus()
+ || isHDREnabled() || isStillMoreEnabled() || isTruePortraitEnabled(); }
+ void updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info);
+ void updateAEInfo(cam_3a_params_t &ae_params);
+ bool isAdvCamFeaturesEnabled() {return isUbiFocusEnabled() ||
+ isChromaFlashEnabled() || m_bOptiZoomOn || isHDREnabled() ||
+ isAEBracketEnabled() || isStillMoreEnabled() || isUbiRefocus();}
+ int32_t setAecLock(const char *aecStr);
+ int32_t updateDebugLevel();
+ bool is4k2kVideoResolution();
+ bool isUBWCEnabled();
+
+ int getBrightness();
+ int32_t updateOisValue(bool oisValue);
+ int32_t setIntEvent(cam_int_evt_params_t params);
+ bool getofflineRAW() {return mOfflineRAW;}
+ bool getQuadraCfa();
+ int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
+ int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
+ int32_t getSharpness() {return m_nSharpness;};
+ int32_t getEffect() {return mParmEffect;};
+ int32_t updateFlashMode(cam_flash_mode_t flash_mode);
+ int32_t configureAEBracketing(cam_capture_frame_config_t &frame_config);
+ int32_t configureHDRBracketing(cam_capture_frame_config_t &frame_config);
+ int32_t configFrameCapture(bool commitSettings);
+ int32_t resetFrameCapture(bool commitSettings);
+ cam_still_more_t getStillMoreSettings() {return m_stillmore_config;};
+ void setStillMoreSettings(cam_still_more_t stillmore_config)
+ {m_stillmore_config = stillmore_config;};
+ cam_still_more_t getStillMoreCapability()
+ {return m_pCapability->stillmore_settings_need;};
+ cam_dyn_img_data_t getDynamicImgData() { return m_DynamicImgData; }
+ void setDynamicImgData(cam_dyn_img_data_t d) { m_DynamicImgData = d; }
+
+ int32_t getParmZoomLevel(){return mParmZoomLevel;};
+ int8_t getReprocCount(){return mTotalPPCount;};
+ bool isMultiPassReprocessing();
+ int8_t getCurPPCount(){return mCurPPCount;};
+ void setReprocCount();
+ bool isPostProcScaling();
+ bool isLLNoiseEnabled();
+ void setCurPPCount(int8_t count) {mCurPPCount = count;};
+ int32_t setQuadraCfaMode(uint32_t value, bool initCommit);
+ int32_t setToneMapMode(uint32_t value, bool initCommit);
+ void setTintless(bool enable);
+ uint8_t getLongshotStages();
+ int8_t getBufBatchCount() {return mBufBatchCnt;};
+ int8_t getVideoBatchSize() {return mVideoBatchSize;};
+
+ int32_t setManualCaptureMode(
+ QCameraManualCaptureModes value = CAM_MANUAL_CAPTURE_TYPE_OFF);
+ QCameraManualCaptureModes getManualCaptureMode()
+ {return m_ManualCaptureMode;};
+ int64_t getExposureTime() {return m_expTime;};
+
+ cam_capture_frame_config_t getCaptureFrameConfig()
+ { return m_captureFrameConfig; };
+ void setJpegRotation(int rotation);
+ uint32_t getJpegRotation() { return mJpegRotation;};
+
+ void setLowLightLevel(cam_low_light_mode_t value)
+ { m_LowLightLevel = value; };
+ cam_low_light_mode_t getLowLightLevel() {return m_LowLightLevel;};
+ bool getLowLightCapture() { return m_LLCaptureEnabled; };
+
+ /* Dual camera specific */
+ bool getDcrf() { return m_bDcrfEnabled; }
+ int32_t setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info);
+ const cam_sync_related_sensors_event_info_t*
+ getRelatedCamSyncInfo(void);
+ int32_t setFrameSyncEnabled(bool enable);
+ bool isFrameSyncEnabled(void);
+ int32_t getRelatedCamCalibration(
+ cam_related_system_calibration_data_t* calib);
+ int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
+ uint8_t fdModeInVideo();
+ bool isOEMFeatEnabled() { return m_bOEMFeatEnabled; }
+
+ int32_t setZslMode(bool value);
+ int32_t updateZSLModeValue(bool value);
+
+ bool isReprocScaleEnabled();
+ bool isUnderReprocScaling();
+ int32_t getPicSizeFromAPK(int &width, int &height);
+
+ int32_t checkFeatureConcurrency();
+ int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+ int32_t getAnalysisInfo(
+ bool fdVideoEnabled,
+ bool hal3,
+ cam_feature_mask_t featureMask,
+ cam_analysis_info_t *pAnalysisInfo);
+private:
+ int32_t setPreviewSize(const QCameraParameters& );
+ int32_t setVideoSize(const QCameraParameters& );
+ int32_t setPictureSize(const QCameraParameters& );
+ int32_t setLiveSnapshotSize(const QCameraParameters& );
+ int32_t setPreviewFormat(const QCameraParameters& );
+ int32_t setPictureFormat(const QCameraParameters& );
+ int32_t setOrientation(const QCameraParameters& );
+ int32_t setJpegThumbnailSize(const QCameraParameters& );
+ int32_t setJpegQuality(const QCameraParameters& );
+ int32_t setPreviewFpsRange(const QCameraParameters& );
+ int32_t setPreviewFrameRate(const QCameraParameters& );
+ int32_t setAutoExposure(const QCameraParameters& );
+ int32_t setEffect(const QCameraParameters& );
+ int32_t setBrightness(const QCameraParameters& );
+ int32_t setFocusMode(const QCameraParameters& );
+ int32_t setFocusPosition(const QCameraParameters& );
+ int32_t setSharpness(const QCameraParameters& );
+ int32_t setSaturation(const QCameraParameters& );
+ int32_t setContrast(const QCameraParameters& );
+ int32_t setSkinToneEnhancement(const QCameraParameters& );
+ int32_t setSceneDetect(const QCameraParameters& );
+ int32_t setVideoHDR(const QCameraParameters& );
+ int32_t setVtEnable(const QCameraParameters& );
+ int32_t setZoom(const QCameraParameters& );
+ int32_t setISOValue(const QCameraParameters& );
+ int32_t setContinuousISO(const QCameraParameters& );
+ int32_t setExposureTime(const QCameraParameters& );
+ int32_t setRotation(const QCameraParameters& );
+ int32_t setVideoRotation(const QCameraParameters& );
+ int32_t setFlash(const QCameraParameters& );
+ int32_t setAecLock(const QCameraParameters& );
+ int32_t setAwbLock(const QCameraParameters& );
+ int32_t setMCEValue(const QCameraParameters& );
+ int32_t setDISValue(const QCameraParameters& params);
+ int32_t setLensShadeValue(const QCameraParameters& );
+ int32_t setExposureCompensation(const QCameraParameters& );
+ int32_t setWhiteBalance(const QCameraParameters& );
+ int32_t setManualWhiteBalance(const QCameraParameters& );
+ int32_t setAntibanding(const QCameraParameters& );
+ int32_t setFocusAreas(const QCameraParameters& );
+ int32_t setMeteringAreas(const QCameraParameters& );
+ int32_t setSceneMode(const QCameraParameters& );
+ int32_t setSelectableZoneAf(const QCameraParameters& );
+ int32_t setAEBracket(const QCameraParameters& );
+ int32_t setAFBracket(const QCameraParameters& );
+ int32_t setReFocus(const QCameraParameters& );
+ int32_t setChromaFlash(const QCameraParameters& );
+ int32_t setOptiZoom(const QCameraParameters& );
+ int32_t setHDRMode(const QCameraParameters& );
+ int32_t setHDRNeed1x(const QCameraParameters& );
+ int32_t setTruePortrait(const QCameraParameters& );
+ int32_t setSeeMore(const QCameraParameters& );
+ int32_t setStillMore(const QCameraParameters& );
+ int32_t setNoiseReductionMode(const QCameraParameters& );
+ int32_t setRedeyeReduction(const QCameraParameters& );
+ int32_t setGpsLocation(const QCameraParameters& );
+ int32_t setRecordingHint(const QCameraParameters& );
+ int32_t setNoDisplayMode(const QCameraParameters& );
+ int32_t setWaveletDenoise(const QCameraParameters& );
+ int32_t setTemporalDenoise(const QCameraParameters&);
+ int32_t setZslMode(const QCameraParameters& );
+ int32_t setZslAttributes(const QCameraParameters& );
+ int32_t setAutoHDR(const QCameraParameters& params);
+ int32_t setCameraMode(const QCameraParameters& );
+ int32_t setSceneSelectionMode(const QCameraParameters& params);
+ int32_t setFaceRecognition(const QCameraParameters& );
+ int32_t setFlip(const QCameraParameters& );
+ int32_t setRetroActiveBurstNum(const QCameraParameters& params);
+ int32_t setBurstLEDOnPeriod(const QCameraParameters& params);
+ int32_t setSnapshotFDReq(const QCameraParameters& );
+ int32_t setStatsDebugMask();
+ int32_t setPAAF();
+ int32_t setTintlessValue(const QCameraParameters& params);
+ int32_t setCDSMode(const QCameraParameters& params);
+ int32_t setInitialExposureIndex(const QCameraParameters& params);
+ int32_t setInstantCapture(const QCameraParameters& params);
+ int32_t setInstantAEC(const QCameraParameters& params);
+ int32_t setMobicat(const QCameraParameters& params);
+ int32_t setRdiMode(const QCameraParameters& );
+ int32_t setSecureMode(const QCameraParameters& );
+ int32_t setCacheVideoBuffers(const QCameraParameters& params);
+ int32_t setCustomParams(const QCameraParameters& params);
+ int32_t setAutoExposure(const char *autoExp);
+ int32_t setPreviewFpsRange(int min_fps,int max_fps,
+ int vid_min_fps,int vid_max_fps);
+ int32_t setEffect(const char *effect);
+ int32_t setBrightness(int brightness);
+ int32_t setFocusMode(const char *focusMode);
+ int32_t setFocusPosition(const char *typeStr, const char *posStr);
+ int32_t setSharpness(int sharpness);
+ int32_t setSaturation(int saturation);
+ int32_t setContrast(int contrast);
+ int32_t setSkinToneEnhancement(int sceFactor);
+ int32_t setSceneDetect(const char *scendDetect);
+ int32_t setVideoHDR(const char *videoHDR);
+ int32_t setSensorSnapshotHDR(const char *snapshotHDR);
+ int32_t setVtEnable(const char *vtEnable);
+ int32_t setZoom(int zoom_level);
+ int32_t setISOValue(const char *isoValue);
+ int32_t setContinuousISO(const char *isoValue);
+ int32_t setExposureTime(const char *expTimeStr);
+ int32_t setFlash(const char *flashStr);
+ int32_t setAwbLock(const char *awbStr);
+ int32_t setMCEValue(const char *mceStr);
+ int32_t setDISValue(const char *disStr);
+ int32_t setHighFrameRate(const int32_t hfrMode);
+ int32_t setLensShadeValue(const char *lensShadeStr);
+ int32_t setExposureCompensation(int expComp);
+ int32_t setWhiteBalance(const char *wbStr);
+ int32_t setWBManualCCT(const char *cctStr);
+ int32_t setManualWBGains(const char *gainStr);
+ int32_t setAntibanding(const char *antiBandingStr);
+ int32_t setFocusAreas(const char *focusAreasStr);
+ int32_t setMeteringAreas(const char *meteringAreasStr);
+ int32_t setSceneMode(const char *sceneModeStr);
+ int32_t setSelectableZoneAf(const char *selZoneAFStr);
+ int32_t setAEBracket(const char *aecBracketStr);
+ int32_t setAFBracket(const char *afBracketStr);
+ int32_t setReFocus(const char *reFocusStr);
+ int32_t setChromaFlash(const char *chromaFlashStr);
+ int32_t setOptiZoom(const char *optiZoomStr);
+ int32_t setHDRMode(const char *optiZoomStr);
+ int32_t setHDRNeed1x(const char *optiZoomStr);
+ int32_t setTruePortrait(const char *truePortraitStr);
+ int32_t setSeeMore(const char *SeeMoreStr);
+ int32_t setStillMore(const char *StillMoreStr);
+ int32_t setNoiseReductionMode(const char *noiseReductionModeStr);
+ int32_t setRedeyeReduction(const char *redeyeStr);
+ int32_t setWaveletDenoise(const char *wnrStr);
+ int32_t setFaceRecognition(const char *faceRecog, uint32_t maxFaces);
+ int32_t setTintlessValue(const char *tintStr);
+ bool UpdateHFRFrameRate(const QCameraParameters& params);
+ int32_t setRdiMode(const char *str);
+ int32_t setSecureMode(const char *str);
+ int32_t setLongshotParam(const QCameraParameters& params);
+ int32_t parseGains(const char *gainStr, double &r_gain,
+ double &g_gain, double &b_gain);
+ int32_t setCacheVideoBuffers(const char *cacheVideoBufStr);
+ int32_t setCDSMode(int32_t cds_mode, bool initCommit);
+ int32_t setEztune();
+ void setLowLightCapture();
+ int setRecordingHintValue(int32_t value); // set local copy of video hint and send to server
+ // no change in parameters value
+ int32_t updateFlash(bool commitSettings);
+ int32_t setRawSize(cam_dimension_t &dim);
+ int32_t setMaxPicSize(cam_dimension_t &dim) { m_maxPicSize = dim; return NO_ERROR; };
+ void setBufBatchCount(int8_t buf_cnt);
+ void setVideoBatchSize();
+ void setDcrf();
+ int32_t setStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t pp_mask);
+ void setOfflineRAW(bool value = 0);
+ int32_t setQuadraCfa(const QCameraParameters& params);
+ int32_t configureFlash(cam_capture_frame_config_t &frame_config);
+ int32_t configureLowLight(cam_capture_frame_config_t &frame_config);
+ int32_t configureManualCapture(cam_capture_frame_config_t &frame_config);
+
+ bool isTNRPreviewEnabled() {return m_bTNRPreviewOn;};
+ bool isTNRVideoEnabled() {return m_bTNRVideoOn;};
+ bool getFaceDetectionOption() { return m_bFaceDetectionOn;}
+ bool isAVTimerEnabled();
+ void getLiveSnapshotSize(cam_dimension_t &dim);
+ int32_t getRawSize(cam_dimension_t &dim) {dim = m_rawSize; return NO_ERROR;};
+ int getAutoFlickerMode();
+ bool sendStreamConfigInfo(cam_stream_size_info_t &stream_config_info);
+ inline bool isLowMemoryDevice() {return m_bIsLowMemoryDevice;};
+ bool isPreviewSeeMoreRequired();
+ bool isEztuneEnabled() { return m_bEztuneEnabled; };
+ int32_t getZoomLevel(){return mZoomLevel;};
+ int32_t parse_pair(const char *str, int *first, int *second,
+ char delim, char **endptr);
+ void parseSizesList(const char *sizesStr, Vector<Size> &sizes);
+ int32_t parseNDimVector(const char *str, int *num, int N, char delim);
+ int32_t parseCameraAreaString(const char *str, int max_num_areas,
+ cam_area_t *pAreas, int& num_areas_found);
+ bool validateCameraAreas(cam_area_t *areas, int num_areas);
+ int parseGPSCoordinate(const char *coord_str, rat_t *coord);
+ int32_t getRational(rat_t *rat, int num, int denom);
+ String8 createSizesString(const cam_dimension_t *sizes, size_t len);
+ String8 createHfrValuesString(const cam_hfr_info_t *values, size_t len,
+ const QCameraMap<cam_hfr_mode_t> *map, size_t map_len);
+ String8 createHfrSizesString(const cam_hfr_info_t *values, size_t len);
+ String8 createFpsRangeString(const cam_fps_range_t *fps,
+ size_t len, int &default_fps_index);
+ String8 createFpsString(cam_fps_range_t &fps);
+ String8 createZoomRatioValuesString(uint32_t *zoomRatios, size_t length);
+ int32_t setDualLedCalibration(const QCameraParameters& params);
+ int32_t setAdvancedCaptureMode();
+
+ // ops for batch set/get params with server
+ int32_t initBatchUpdate(parm_buffer_t *p_table);
+ int32_t commitSetBatch();
+ int32_t commitGetBatch();
+
+ // ops to temporarily update parameter entries and commit (typical sequence sketched below)
+ int32_t updateParamEntry(const char *key, const char *value);
+ int32_t commitParamChanges();
+ void updateViewAngles();
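+ // A minimal usage sketch for the helpers above (an assumption inferred from
+ // the method names, not taken verbatim from the implementation; the key and
+ // value strings are purely illustrative):
+ //   server batch:    initBatchUpdate(m_pParamBuf); /* fill entries */ commitSetBatch();
+ //   string entries:  updateParamEntry("zoom", "1"); commitParamChanges();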
+
+ // Map from strings to values
+ static const cam_dimension_t THUMBNAIL_SIZES_MAP[];
+ static const QCameraMap<cam_auto_exposure_mode_type> AUTO_EXPOSURE_MAP[];
+ static const QCameraMap<cam_aec_convergence_type> INSTANT_CAPTURE_MODES_MAP[];
+ static const QCameraMap<cam_aec_convergence_type> INSTANT_AEC_MODES_MAP[];
+ static const QCameraMap<cam_format_t> PREVIEW_FORMATS_MAP[];
+ static const QCameraMap<cam_format_t> PICTURE_TYPES_MAP[];
+ static const QCameraMap<cam_focus_mode_type> FOCUS_MODES_MAP[];
+ static const QCameraMap<cam_effect_mode_type> EFFECT_MODES_MAP[];
+ static const QCameraMap<cam_scene_mode_type> SCENE_MODES_MAP[];
+ static const QCameraMap<cam_flash_mode_t> FLASH_MODES_MAP[];
+ static const QCameraMap<cam_focus_algorithm_type> FOCUS_ALGO_MAP[];
+ static const QCameraMap<cam_wb_mode_type> WHITE_BALANCE_MODES_MAP[];
+ static const QCameraMap<cam_antibanding_mode_type> ANTIBANDING_MODES_MAP[];
+ static const QCameraMap<cam_iso_mode_type> ISO_MODES_MAP[];
+ static const QCameraMap<cam_hfr_mode_t> HFR_MODES_MAP[];
+ static const QCameraMap<cam_bracket_mode> BRACKETING_MODES_MAP[];
+ static const QCameraMap<int> ON_OFF_MODES_MAP[];
+ static const QCameraMap<int> ENABLE_DISABLE_MODES_MAP[];
+ static const QCameraMap<int> DENOISE_ON_OFF_MODES_MAP[];
+ static const QCameraMap<int> TRUE_FALSE_MODES_MAP[];
+ static const QCameraMap<int> TOUCH_AF_AEC_MODES_MAP[];
+ static const QCameraMap<cam_flip_t> FLIP_MODES_MAP[];
+ static const QCameraMap<int> AF_BRACKETING_MODES_MAP[];
+ static const QCameraMap<int> RE_FOCUS_MODES_MAP[];
+ static const QCameraMap<int> CHROMA_FLASH_MODES_MAP[];
+ static const QCameraMap<int> OPTI_ZOOM_MODES_MAP[];
+ static const QCameraMap<int> TRUE_PORTRAIT_MODES_MAP[];
+ static const QCameraMap<cam_cds_mode_type_t> CDS_MODES_MAP[];
+ static const QCameraMap<int> HDR_MODES_MAP[];
+ static const QCameraMap<int> VIDEO_ROTATION_MODES_MAP[];
+ static const QCameraMap<int> SEE_MORE_MODES_MAP[];
+ static const QCameraMap<int> STILL_MORE_MODES_MAP[];
+ static const QCameraMap<int> NOISE_REDUCTION_MODES_MAP[];
+
+ QCameraReprocScaleParam m_reprocScaleParam;
+ QCameraCommon mCommon;
+
+ cam_capability_t *m_pCapability;
+ mm_camera_vtbl_t *m_pCamOpsTbl;
+ QCameraHeapMemory *m_pParamHeap;
+ parm_buffer_t *m_pParamBuf; // ptr to param buf in m_pParamHeap
+ /* heap for mapping dual cam event info */
+ QCameraHeapMemory *m_pRelCamSyncHeap;
+ /* ptr to sync buffer in m_pRelCamSyncHeap */
+ cam_sync_related_sensors_event_info_t *m_pRelCamSyncBuf;
+ cam_sync_related_sensors_event_info_t m_relCamSyncInfo;
+ bool m_bFrameSyncEnabled;
+ cam_is_type_t mIsType;
+ cam_is_type_t mIsTypePreview;
+
+ bool m_bZslMode; // if ZSL is enabled
+ bool m_bZslMode_new;
+ bool m_bForceZslMode;
+ bool m_bRecordingHint; // local copy of recording hint
+ bool m_bRecordingHint_new;
+ bool m_bHistogramEnabled; // if histogram is enabled
+ bool m_bLongshotEnabled; // if longshot is enabled
+ uint32_t m_nFaceProcMask; // face process mask
+ bool m_bFaceDetectionOn; // if face Detection turned on by user
+ bool m_bDebugFps; // if FPS need to be logged
+ cam_focus_mode_type mFocusMode;
+ cam_format_t mPreviewFormat;
+ cam_format_t mAppPreviewFormat;
+ int32_t mPictureFormat; // could be CAMERA_PICTURE_TYPE_JPEG or cam_format_t
+ bool m_bNeedRestart; // if preview needs restart after parameters updated
+ bool m_bNoDisplayMode;
+ bool m_bWNROn;
+ bool m_bTNRPreviewOn;
+ bool m_bTNRVideoOn;
+ bool m_bTNRSnapshotOn;
+ bool m_bInited;
+ int m_nRetroBurstNum;
+ int m_nBurstLEDOnPeriod;
+ cam_exp_bracketing_t m_AEBracketingClient;
+ bool m_bUpdateEffects; // Cause reapplying of effects
+ bool m_bSceneTransitionAuto; // Indicate that scene has changed to Auto
+ bool m_bPreviewFlipChanged; // if flip setting for preview changed
+ bool m_bVideoFlipChanged; // if flip setting for video changed
+ bool m_bSnapshotFlipChanged; // if flip setting for snapshot changed
+ bool m_bFixedFrameRateSet; // Indicates that a fixed frame rate is set
+ qcamera_thermal_mode m_ThermalMode; // adjust fps vs adjust frameskip
+ cam_dimension_t m_LiveSnapshotSize; // live snapshot size
+ cam_dimension_t m_rawSize; // raw capture size
+ cam_dimension_t m_maxPicSize;
+ bool m_bHDREnabled; // if HDR is enabled
+ bool m_bLocalHDREnabled; // This flag tells whether HDR is enabled or not, regardless of the app mode
+ bool m_bAVTimerEnabled; // if AVTimer is enabled
+ bool m_bDISEnabled;
+ bool m_bOISEnabled;
+ cam_still_more_t m_stillmore_config;
+
+ uint8_t m_MobiMask;
+ QCameraAdjustFPS *m_AdjustFPS;
+ bool m_bHDR1xFrameEnabled; // if frame with exposure compensation 0 during HDR is enabled
+ bool m_HDRSceneEnabled; // Auto HDR indication
+ bool m_bHDRThumbnailProcessNeeded; // if thumbnail need to be processed for HDR
+ bool m_bHDR1xExtraBufferNeeded; // if extra frame with exposure compensation 0 during HDR is needed
+ bool m_bHDROutputCropEnabled; // if HDR output frame need to be scaled to user resolution
+ DefaultKeyedVector<String8,String8> m_tempMap; // map to temporarily store parameters to be set
+ cam_fps_range_t m_default_fps_range;
+ bool m_bAFBracketingOn;
+ bool m_bReFocusOn;
+ bool m_bChromaFlashOn;
+ bool m_bOptiZoomOn;
+ bool m_bSceneSelection;
+ Mutex m_SceneSelectLock;
+ cam_scene_mode_type m_SelectedScene;
+ bool m_bSeeMoreOn;
+ bool m_bStillMoreOn;
+ bool m_bHighQualityNoiseReductionMode;
+ cam_fps_range_t m_hfrFpsRange;
+ bool m_bHfrMode;
+ bool m_bSensorHDREnabled; // if sensor HDR is enabled
+ bool m_bRdiMode; // if RDI mode
+ bool m_bSecureMode;
+ bool m_bAeBracketingEnabled;
+ int32_t mFlashValue;
+ int32_t mFlashDaemonValue;
+ int32_t mHfrMode;
+ bool m_bHDRModeSensor;
+ bool mOfflineRAW;
+ bool m_bTruePortraitOn;
+ cam_feature_mask_t m_nMinRequiredPpMask;
+ cam_feature_mask_t mStreamPpMask[CAM_STREAM_TYPE_MAX];
+ int32_t m_nSharpness;
+ int8_t mTotalPPCount;
+ int8_t mCurPPCount;
+ int32_t mZoomLevel;
+ bool m_bStreamsConfigured;
+ int32_t mParmZoomLevel;
+ bool m_bIsLowMemoryDevice;
+ int32_t mCds_mode;
+ int32_t mParmEffect;
+ cam_capture_frame_config_t m_captureFrameConfig;
+ int8_t mBufBatchCnt;
+ bool m_bEztuneEnabled;
+ bool m_bDcrfEnabled;
+ uint32_t mRotation;
+ uint32_t mJpegRotation;
+ int8_t mVideoBatchSize;
+ bool m_LLCaptureEnabled;
+ cam_low_light_mode_t m_LowLightLevel;
+ bool m_bLtmForSeeMoreEnabled;
+ int64_t m_expTime;
+ bool m_bOEMFeatEnabled;
+ int32_t m_isoValue;
+ QCameraManualCaptureModes m_ManualCaptureMode;
+ cam_dyn_img_data_t m_DynamicImgData;
+ int32_t m_dualLedCalibration;
+ // Param to trigger instant AEC.
+ bool m_bInstantAEC;
+ // Param to trigger instant capture.
+ bool m_bInstantCapture;
+ // Number of frames the camera interface will wait for before getting the instant capture frame.
+ uint8_t mAecFrameBound;
+ // Number of preview frames that the HAL will hold without displaying in instant AEC mode.
+ uint8_t mAecSkipDisplayFrameBound;
+ bool m_bQuadraCfa;
+};
+
+}; // namespace qcamera
+
+#endif
diff --git a/camera/QCamera2/HAL/QCameraParametersIntf.cpp b/camera/QCamera2/HAL/QCameraParametersIntf.cpp
new file mode 100644
index 0000000..977077b
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParametersIntf.cpp
@@ -0,0 +1,1421 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParametersIntf"
+
+// System dependencies
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "QCameraParameters.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+#define CHECK_PARAM_INTF(impl) LOG_ALWAYS_FATAL_IF(((impl) == NULL), "impl is NULL!")
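+// CHECK_PARAM_INTF aborts the process (via LOG_ALWAYS_FATAL_IF) if an interface
+// method is invoked before allocate() has created the underlying implementation.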
+
+QCameraParametersIntf::QCameraParametersIntf() :
+ mImpl(NULL)
+{
+}
+
+QCameraParametersIntf::~QCameraParametersIntf()
+{
+ {
+ Mutex::Autolock lock(mLock);
+ if (mImpl) {
+ delete mImpl;
+ mImpl = NULL;
+ }
+ }
+}
+
+
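+// QCameraParametersIntf is a thin, thread-safe facade: every public method
+// acquires mLock and forwards to the QCameraParameters implementation (mImpl).
+// A minimal lifecycle sketch for a caller (illustrative only; the capability,
+// vtbl, FPS-adjust and params arguments come from the camera open path, which
+// is not shown here):
+//
+//   QCameraParametersIntf parm;
+//   if (parm.allocate() == NO_ERROR &&
+//           parm.init(capabilities, mmOps, adjustFPS) == NO_ERROR) {
+//       bool needRestart = false;
+//       parm.updateParameters(params, needRestart);
+//       parm.commitParameters();
+//   }
+//   parm.deinit();
+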
+int32_t QCameraParametersIntf::allocate()
+{
+ Mutex::Autolock lock(mLock);
+ mImpl = new QCameraParameters();
+ if (!mImpl) {
+ LOGE("Out of memory");
+ return NO_MEMORY;
+ }
+
+ return mImpl->allocate();
+}
+
+int32_t QCameraParametersIntf::init(cam_capability_t *capabilities,
+ mm_camera_vtbl_t *mmOps,
+ QCameraAdjustFPS *adjustFPS)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->init(capabilities, mmOps, adjustFPS);
+}
+
+void QCameraParametersIntf::deinit()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->deinit();
+}
+
+int32_t QCameraParametersIntf::updateParameters(const String8& params, bool &needRestart)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateParameters(params, needRestart);
+}
+
+int32_t QCameraParametersIntf::commitParameters()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->commitParameters();
+}
+
+char* QCameraParametersIntf::getParameters()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getParameters();
+}
+
+void QCameraParametersIntf::getPreviewFpsRange(int *min_fps, int *max_fps) const
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->getPreviewFpsRange(min_fps, max_fps);
+}
+
+#ifdef TARGET_TS_MAKEUP
+bool QCameraParametersIntf::getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getTsMakeupInfo(whiteLevel, cleanLevel);
+}
+#endif
+
+int QCameraParametersIntf::getPreviewHalPixelFormat()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getPreviewHalPixelFormat();
+}
+
+int32_t QCameraParametersIntf::getStreamRotation(cam_stream_type_t streamType,
+ cam_pp_feature_config_t &featureConfig,
+ cam_dimension_t &dim)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStreamRotation(streamType, featureConfig, dim);
+}
+
+int32_t QCameraParametersIntf::getStreamFormat(cam_stream_type_t streamType,
+ cam_format_t &format)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStreamFormat(streamType, format);
+}
+
+int32_t QCameraParametersIntf::getStreamDimension(cam_stream_type_t streamType,
+ cam_dimension_t &dim)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStreamDimension(streamType, dim);
+}
+
+void QCameraParametersIntf::getThumbnailSize(int *width, int *height) const
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->getThumbnailSize(width, height);
+}
+
+uint8_t QCameraParametersIntf::getZSLBurstInterval()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getZSLBurstInterval();
+}
+
+uint8_t QCameraParametersIntf::getZSLQueueDepth()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getZSLQueueDepth();
+}
+
+uint8_t QCameraParametersIntf::getZSLBackLookCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getZSLBackLookCount();
+}
+
+uint8_t QCameraParametersIntf::getMaxUnmatchedFramesInQueue()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getMaxUnmatchedFramesInQueue();
+}
+
+bool QCameraParametersIntf::isZSLMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isZSLMode();
+}
+
+bool QCameraParametersIntf::isRdiMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isRdiMode();
+}
+
+bool QCameraParametersIntf::isSecureMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isSecureMode();
+}
+
+bool QCameraParametersIntf::isNoDisplayMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isNoDisplayMode();
+}
+
+bool QCameraParametersIntf::isWNREnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isWNREnabled();
+}
+
+bool QCameraParametersIntf::isTNRSnapshotEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isTNRSnapshotEnabled();
+}
+
+int32_t QCameraParametersIntf::getCDSMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getCDSMode();
+}
+
+bool QCameraParametersIntf::isLTMForSeeMoreEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isLTMForSeeMoreEnabled();
+}
+
+bool QCameraParametersIntf::isHfrMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHfrMode();
+}
+
+void QCameraParametersIntf::getHfrFps(cam_fps_range_t &pFpsRange)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->getHfrFps(pFpsRange);
+}
+
+uint8_t QCameraParametersIntf::getNumOfSnapshots()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfSnapshots();
+}
+
+uint8_t QCameraParametersIntf::getNumOfRetroSnapshots()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfRetroSnapshots();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraHDRInBufsIfNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfExtraHDRInBufsIfNeeded();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraHDROutBufsIfNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfExtraHDROutBufsIfNeeded();
+}
+
+bool QCameraParametersIntf::getRecordingHintValue()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRecordingHintValue();
+}
+
+uint32_t QCameraParametersIntf::getJpegQuality()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getJpegQuality();
+}
+
+uint32_t QCameraParametersIntf::getRotation()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRotation();
+}
+
+uint32_t QCameraParametersIntf::getDeviceRotation()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getDeviceRotation();
+}
+
+uint32_t QCameraParametersIntf::getJpegExifRotation()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getJpegExifRotation();
+}
+
+bool QCameraParametersIntf::useJpegExifRotation()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->useJpegExifRotation();
+}
+
+int32_t QCameraParametersIntf::getEffectValue()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getEffectValue();
+}
+
+bool QCameraParametersIntf::isInstantAECEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isInstantAECEnabled();
+}
+
+bool QCameraParametersIntf::isInstantCaptureEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isInstantCaptureEnabled();
+}
+
+uint8_t QCameraParametersIntf::getAecFrameBoundValue()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getAecFrameBoundValue();
+}
+
+uint8_t QCameraParametersIntf::getAecSkipDisplayFrameBound()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getAecSkipDisplayFrameBound();
+}
+
+int32_t QCameraParametersIntf::getExifDateTime(
+ String8 &dateTime, String8 &subsecTime)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifDateTime(dateTime, subsecTime);
+}
+
+int32_t QCameraParametersIntf::getExifFocalLength(rat_t *focalLength)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifFocalLength(focalLength);
+}
+
+uint16_t QCameraParametersIntf::getExifIsoSpeed()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifIsoSpeed();
+}
+
+int32_t QCameraParametersIntf::getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifGpsProcessingMethod(gpsProcessingMethod, count);
+}
+
+int32_t QCameraParametersIntf::getExifLatitude(rat_t *latitude, char *latRef)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifLatitude(latitude, latRef);
+}
+
+int32_t QCameraParametersIntf::getExifLongitude(rat_t *longitude, char *lonRef)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifLongitude(longitude, lonRef);
+}
+
+int32_t QCameraParametersIntf::getExifAltitude(rat_t *altitude, char *altRef)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifAltitude(altitude, altRef);
+}
+
+int32_t QCameraParametersIntf::getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifGpsDateTimeStamp(gpsDateStamp, bufLen, gpsTimeStamp);
+}
+
+bool QCameraParametersIntf::isVideoBuffersCached()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isVideoBuffersCached();
+}
+
+int32_t QCameraParametersIntf::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateFocusDistances(focusDistances);
+}
+
+bool QCameraParametersIntf::isAEBracketEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isAEBracketEnabled();
+}
+
+int32_t QCameraParametersIntf::setAEBracketing()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setAEBracketing();
+}
+
+bool QCameraParametersIntf::isFpsDebugEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isFpsDebugEnabled();
+}
+
+bool QCameraParametersIntf::isHistogramEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHistogramEnabled();
+}
+
+bool QCameraParametersIntf::isSceneSelectionEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isSceneSelectionEnabled();
+}
+
+int32_t QCameraParametersIntf::setSelectedScene(cam_scene_mode_type scene)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setSelectedScene(scene);
+}
+
+cam_scene_mode_type QCameraParametersIntf::getSelectedScene()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getSelectedScene();
+}
+
+bool QCameraParametersIntf::isFaceDetectionEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isFaceDetectionEnabled();
+}
+
+int32_t QCameraParametersIntf::setFaceDetectionOption(bool enabled)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setFaceDetectionOption(enabled);
+}
+
+int32_t QCameraParametersIntf::setHistogram(bool enabled)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setHistogram(enabled);
+}
+
+int32_t QCameraParametersIntf::setFaceDetection(bool enabled, bool initCommit)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setFaceDetection(enabled, initCommit);
+}
+
+int32_t QCameraParametersIntf::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setFrameSkip(pattern);
+}
+
+qcamera_thermal_mode QCameraParametersIntf::getThermalMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getThermalMode();
+}
+
+int32_t QCameraParametersIntf::updateRecordingHintValue(int32_t value)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateRecordingHintValue(value);
+}
+
+int32_t QCameraParametersIntf::setHDRAEBracket(cam_exp_bracketing_t hdrBracket)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setHDRAEBracket(hdrBracket);
+}
+
+bool QCameraParametersIntf::isHDREnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHDREnabled();
+}
+
+bool QCameraParametersIntf::isAutoHDREnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isAutoHDREnabled();
+}
+
+int32_t QCameraParametersIntf::stopAEBracket()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->stopAEBracket();
+}
+
+int32_t QCameraParametersIntf::updateRAW(cam_dimension_t max_dim)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateRAW(max_dim);
+}
+
+bool QCameraParametersIntf::isDISEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isDISEnabled();
+}
+
+cam_is_type_t QCameraParametersIntf::getISType()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getISType();
+}
+
+cam_is_type_t QCameraParametersIntf::getPreviewISType()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getPreviewISType();
+}
+
+uint8_t QCameraParametersIntf::getMobicatMask()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getMobicatMask();
+}
+
+cam_focus_mode_type QCameraParametersIntf::getFocusMode() const
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getFocusMode();
+}
+
+int32_t QCameraParametersIntf::setNumOfSnapshot()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setNumOfSnapshot();
+}
+
+int32_t QCameraParametersIntf::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->adjustPreviewFpsRange(fpsRange);
+}
+
+bool QCameraParametersIntf::isJpegPictureFormat()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isJpegPictureFormat();
+}
+
+bool QCameraParametersIntf::isNV16PictureFormat()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isNV16PictureFormat();
+}
+
+bool QCameraParametersIntf::isNV21PictureFormat()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isNV21PictureFormat();
+}
+
+cam_denoise_process_type_t QCameraParametersIntf::getDenoiseProcessPlate(
+ cam_intf_parm_type_t type)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getDenoiseProcessPlate(type);
+}
+
+int32_t QCameraParametersIntf::getMaxPicSize(cam_dimension_t &dim)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getMaxPicSize(dim);
+}
+
+int QCameraParametersIntf::getFlipMode(cam_stream_type_t streamType)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getFlipMode(streamType);
+}
+
+bool QCameraParametersIntf::isSnapshotFDNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isSnapshotFDNeeded();
+}
+
+bool QCameraParametersIntf::isHDR1xFrameEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHDR1xFrameEnabled();
+}
+
+bool QCameraParametersIntf::isYUVFrameInfoNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isYUVFrameInfoNeeded();
+}
+
+const char* QCameraParametersIntf::getFrameFmtString(cam_format_t fmt)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getFrameFmtString(fmt);
+}
+
+bool QCameraParametersIntf::isHDR1xExtraBufferNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHDR1xExtraBufferNeeded();
+}
+
+bool QCameraParametersIntf::isHDROutputCropEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHDROutputCropEnabled();
+}
+
+bool QCameraParametersIntf::isPreviewFlipChanged()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isPreviewFlipChanged();
+}
+
+bool QCameraParametersIntf::isVideoFlipChanged()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isVideoFlipChanged();
+}
+
+bool QCameraParametersIntf::isSnapshotFlipChanged()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isSnapshotFlipChanged();
+}
+
+void QCameraParametersIntf::setHDRSceneEnable(bool bflag)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setHDRSceneEnable(bflag);
+}
+
+int32_t QCameraParametersIntf::updateAWBParams(cam_awb_params_t &awb_params)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateAWBParams(awb_params);
+}
+
+const char * QCameraParametersIntf::getASDStateString(cam_auto_scene_t scene)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getASDStateString(scene);
+}
+
+bool QCameraParametersIntf::isHDRThumbnailProcessNeeded()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHDRThumbnailProcessNeeded();
+}
+
+void QCameraParametersIntf::setMinPpMask(cam_feature_mask_t min_pp_mask)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setMinPpMask(min_pp_mask);
+}
+
+bool QCameraParametersIntf::setStreamConfigure(bool isCapture,
+ bool previewAsPostview, bool resetConfig)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setStreamConfigure(isCapture,
+ previewAsPostview, resetConfig);
+}
+
+int32_t QCameraParametersIntf::addOnlineRotation(uint32_t rotation,
+ uint32_t streamId, int32_t device_rotation)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->addOnlineRotation(rotation, streamId, device_rotation);
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForImageProc()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfExtraBuffersForImageProc();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForVideo()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfExtraBuffersForVideo();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForPreview()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumOfExtraBuffersForPreview();
+}
+
+uint32_t QCameraParametersIntf::getExifBufIndex(uint32_t captureIndex)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExifBufIndex(captureIndex);
+}
+
+bool QCameraParametersIntf::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->needThumbnailReprocess(pFeatureMask);
+}
+
+bool QCameraParametersIntf::isUbiFocusEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isUbiFocusEnabled();
+}
+
+bool QCameraParametersIntf::isChromaFlashEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isChromaFlashEnabled();
+}
+
+bool QCameraParametersIntf::isHighQualityNoiseReductionMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isHighQualityNoiseReductionMode();
+}
+
+bool QCameraParametersIntf::isTruePortraitEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isTruePortraitEnabled();
+}
+
+size_t QCameraParametersIntf::getTPMaxMetaSize()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getTPMaxMetaSize();
+}
+
+bool QCameraParametersIntf::isSeeMoreEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isSeeMoreEnabled();
+}
+
+bool QCameraParametersIntf::isStillMoreEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isStillMoreEnabled();
+}
+
+bool QCameraParametersIntf::isOptiZoomEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isOptiZoomEnabled();
+}
+
+int32_t QCameraParametersIntf::commitAFBracket(cam_af_bracketing_t afBracket)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->commitAFBracket(afBracket);
+}
+
+
+int32_t QCameraParametersIntf::set3ALock(bool lock3A)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->set3ALock(lock3A);
+}
+
+int32_t QCameraParametersIntf::setAndCommitZoom(int zoom_level)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setAndCommitZoom(zoom_level);
+}
+uint8_t QCameraParametersIntf::getBurstCountForAdvancedCapture()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getBurstCountForAdvancedCapture();
+}
+uint32_t QCameraParametersIntf::getNumberInBufsForSingleShot()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumberInBufsForSingleShot();
+}
+uint32_t QCameraParametersIntf::getNumberOutBufsForSingleShot()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getNumberOutBufsForSingleShot();
+}
+int32_t QCameraParametersIntf::setLongshotEnable(bool enable)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setLongshotEnable(enable);
+}
+String8 QCameraParametersIntf::dump()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->dump();
+}
+bool QCameraParametersIntf::isUbiRefocus()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isUbiRefocus();
+}
+uint32_t QCameraParametersIntf::getRefocusMaxMetaSize()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRefocusMaxMetaSize();
+}
+uint8_t QCameraParametersIntf::getRefocusOutputCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRefocusOutputCount();
+}
+
+bool QCameraParametersIntf::generateThumbFromMain()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->generateThumbFromMain();
+}
+
+void QCameraParametersIntf::updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->updateCurrentFocusPosition(cur_pos_info);
+}
+
+void QCameraParametersIntf::updateAEInfo(cam_3a_params_t &ae_params)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->updateAEInfo(ae_params);
+}
+
+bool QCameraParametersIntf::isAdvCamFeaturesEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isAdvCamFeaturesEnabled();
+}
+
+int32_t QCameraParametersIntf::setAecLock(const char *aecStr)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setAecLock(aecStr);
+}
+
+int32_t QCameraParametersIntf::updateDebugLevel()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateDebugLevel();
+}
+
+bool QCameraParametersIntf::is4k2kVideoResolution()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->is4k2kVideoResolution();
+}
+
+bool QCameraParametersIntf::isUBWCEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isUBWCEnabled();
+}
+int QCameraParametersIntf::getBrightness()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getBrightness();
+}
+
+int32_t QCameraParametersIntf::updateOisValue(bool oisValue)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateOisValue(oisValue);
+}
+
+int32_t QCameraParametersIntf::setIntEvent(cam_int_evt_params_t params)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setIntEvent(params);
+}
+
+bool QCameraParametersIntf::getofflineRAW()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getofflineRAW();
+}
+
+bool QCameraParametersIntf::getQuadraCfa()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getQuadraCfa();
+}
+
+int32_t QCameraParametersIntf::updatePpFeatureMask(cam_stream_type_t stream_type)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updatePpFeatureMask(stream_type);
+}
+
+int32_t QCameraParametersIntf::getStreamPpMask(cam_stream_type_t stream_type,
+ cam_feature_mask_t &pp_mask)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStreamPpMask(stream_type, pp_mask);
+}
+
+int32_t QCameraParametersIntf::getSharpness()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getSharpness();
+}
+
+int32_t QCameraParametersIntf::getEffect()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getEffect();
+}
+
+int32_t QCameraParametersIntf::updateFlashMode(cam_flash_mode_t flash_mode)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateFlashMode(flash_mode);
+}
+
+int32_t QCameraParametersIntf::configureAEBracketing(cam_capture_frame_config_t &frame_config)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->configureAEBracketing(frame_config);
+}
+
+int32_t QCameraParametersIntf::configureHDRBracketing(cam_capture_frame_config_t &frame_config)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->configureHDRBracketing(frame_config);
+}
+
+int32_t QCameraParametersIntf::configFrameCapture(bool commitSettings)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->configFrameCapture(commitSettings);
+}
+
+int32_t QCameraParametersIntf::resetFrameCapture(bool commitSettings)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->resetFrameCapture(commitSettings);
+}
+
+cam_still_more_t QCameraParametersIntf::getStillMoreSettings()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStillMoreSettings();
+}
+
+void QCameraParametersIntf::setStillMoreSettings(cam_still_more_t stillmore_config)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setStillMoreSettings(stillmore_config);
+}
+
+cam_still_more_t QCameraParametersIntf::getStillMoreCapability()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getStillMoreCapability();
+}
+
+cam_dyn_img_data_t QCameraParametersIntf::getDynamicImgData()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getDynamicImgData();
+}
+
+void QCameraParametersIntf::setDynamicImgData(cam_dyn_img_data_t d)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setDynamicImgData(d);
+}
+
+int32_t QCameraParametersIntf::getParmZoomLevel()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getParmZoomLevel();
+}
+
+
+int8_t QCameraParametersIntf::getReprocCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getReprocCount();
+}
+
+
+int8_t QCameraParametersIntf::getCurPPCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getCurPPCount();
+}
+
+
+void QCameraParametersIntf::setReprocCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setReprocCount();
+}
+
+
+bool QCameraParametersIntf::isPostProcScaling()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isPostProcScaling();
+}
+
+
+bool QCameraParametersIntf::isLLNoiseEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isLLNoiseEnabled();
+}
+
+
+void QCameraParametersIntf::setCurPPCount(int8_t count)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setCurPPCount(count);
+}
+
+int32_t QCameraParametersIntf::setQuadraCfaMode(uint32_t value, bool initCommit)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setQuadraCfaMode(value, initCommit);
+}
+
+int32_t QCameraParametersIntf::setToneMapMode(uint32_t value, bool initCommit)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setToneMapMode(value, initCommit);
+}
+
+void QCameraParametersIntf::setTintless(bool enable)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setTintless(enable);
+}
+
+uint8_t QCameraParametersIntf::getLongshotStages()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getLongshotStages();
+}
+
+int8_t QCameraParametersIntf::getBufBatchCount()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getBufBatchCount();
+}
+
+int8_t QCameraParametersIntf::getVideoBatchSize()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getVideoBatchSize();
+}
+
+int32_t QCameraParametersIntf::setManualCaptureMode(
+ QCameraManualCaptureModes value)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setManualCaptureMode(value);
+}
+
+QCameraManualCaptureModes QCameraParametersIntf::getManualCaptureMode()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getManualCaptureMode();
+}
+
+int64_t QCameraParametersIntf::getExposureTime()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getExposureTime();
+}
+
+cam_capture_frame_config_t QCameraParametersIntf::getCaptureFrameConfig()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getCaptureFrameConfig();
+}
+
+void QCameraParametersIntf::setJpegRotation(int rotation)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setJpegRotation(rotation);
+}
+
+uint32_t QCameraParametersIntf::getJpegRotation()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getJpegRotation();
+}
+
+void QCameraParametersIntf::setLowLightLevel(cam_low_light_mode_t value)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ mImpl->setLowLightLevel(value);
+}
+
+cam_low_light_mode_t QCameraParametersIntf::getLowLightLevel()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getLowLightLevel();
+}
+
+bool QCameraParametersIntf::getLowLightCapture()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getLowLightCapture();
+}
+
+bool QCameraParametersIntf::getDcrf()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getDcrf();
+}
+
+int32_t QCameraParametersIntf::setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setRelatedCamSyncInfo(info);
+}
+
+const cam_sync_related_sensors_event_info_t*
+ QCameraParametersIntf::getRelatedCamSyncInfo(void)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRelatedCamSyncInfo();
+}
+
+int32_t QCameraParametersIntf::setFrameSyncEnabled(
+ bool enable)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setFrameSyncEnabled(enable);
+}
+
+bool QCameraParametersIntf::isFrameSyncEnabled(void)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isFrameSyncEnabled();
+}
+
+int32_t QCameraParametersIntf::getRelatedCamCalibration(
+ cam_related_system_calibration_data_t* calib)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getRelatedCamCalibration(calib);
+}
+
+int32_t QCameraParametersIntf::bundleRelatedCameras(bool sync, uint32_t sessionid)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->bundleRelatedCameras(sync, sessionid);
+}
+
+uint8_t QCameraParametersIntf::fdModeInVideo()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->fdModeInVideo();
+}
+
+bool QCameraParametersIntf::isOEMFeatEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isOEMFeatEnabled();
+}
+
+int32_t QCameraParametersIntf::setZslMode(bool value)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setZslMode(value);
+}
+
+int32_t QCameraParametersIntf::updateZSLModeValue(bool value)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->updateZSLModeValue(value);
+}
+
+bool QCameraParametersIntf::isReprocScaleEnabled()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isReprocScaleEnabled();
+}
+
+bool QCameraParametersIntf::isUnderReprocScaling()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->isUnderReprocScaling();
+}
+
+int32_t QCameraParametersIntf::getPicSizeFromAPK(int &width, int &height)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getPicSizeFromAPK(width, height);
+}
+
+int32_t QCameraParametersIntf::checkFeatureConcurrency()
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->checkFeatureConcurrency();
+}
+
+int32_t QCameraParametersIntf::setInstantAEC(uint8_t enable, bool initCommit)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->setInstantAEC(enable, initCommit);
+}
+
+int32_t QCameraParametersIntf::getAnalysisInfo(
+ bool fdVideoEnabled,
+ bool hal3,
+ cam_feature_mask_t featureMask,
+ cam_analysis_info_t *pAnalysisInfo)
+{
+ Mutex::Autolock lock(mLock);
+ CHECK_PARAM_INTF(mImpl);
+ return mImpl->getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraParametersIntf.h b/camera/QCamera2/HAL/QCameraParametersIntf.h
new file mode 100644
index 0000000..0bd7e73
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParametersIntf.h
@@ -0,0 +1,310 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_INTF_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_INTF_H
+
+#include <utils/String8.h>
+#include <utils/Mutex.h>
+#include "cam_intf.h"
+#include "cam_types.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+typedef cam_manual_capture_type QCameraManualCaptureModes;
+
+class QCameraAdjustFPS
+{
+public:
+ virtual int recalcFPSRange(int &minFPS, int &maxFPS,
+ const float &minVideoFPS, const float &maxVideoFPS,
+ cam_fps_range_t &adjustedRange) = 0;
+ virtual ~QCameraAdjustFPS() {}
+};
+
+class QCameraParameters;
+
+class QCameraParametersIntf
+{
+public:
+
+ // member variables
+ QCameraParametersIntf();
+ ~QCameraParametersIntf();
+
+ int32_t allocate();
+ int32_t init(cam_capability_t *capabilities,
+ mm_camera_vtbl_t *mmOps,
+ QCameraAdjustFPS *adjustFPS);
+
+ void deinit();
+ int32_t updateParameters(const String8& params, bool &needRestart);
+ int32_t commitParameters();
+
+ char* getParameters();
+ void getPreviewFpsRange(int *min_fps, int *max_fps) const;
+#ifdef TARGET_TS_MAKEUP
+ bool getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const;
+#endif
+
+ int getPreviewHalPixelFormat();
+ int32_t getStreamRotation(cam_stream_type_t streamType,
+ cam_pp_feature_config_t &featureConfig,
+ cam_dimension_t &dim);
+ int32_t getStreamFormat(cam_stream_type_t streamType,
+ cam_format_t &format);
+ int32_t getStreamDimension(cam_stream_type_t streamType,
+ cam_dimension_t &dim);
+
+ void getThumbnailSize(int *width, int *height) const;
+ uint8_t getZSLBurstInterval();
+ uint8_t getZSLQueueDepth();
+ uint8_t getZSLBackLookCount();
+ uint8_t getMaxUnmatchedFramesInQueue();
+ bool isZSLMode();
+ bool isRdiMode();
+ bool isSecureMode();
+ bool isNoDisplayMode();
+ bool isWNREnabled();
+ bool isTNRSnapshotEnabled();
+ int32_t getCDSMode();
+ bool isLTMForSeeMoreEnabled();
+ bool isHfrMode();
+ void getHfrFps(cam_fps_range_t &pFpsRange);
+ uint8_t getNumOfSnapshots();
+ uint8_t getNumOfRetroSnapshots();
+ uint8_t getNumOfExtraHDRInBufsIfNeeded();
+ uint8_t getNumOfExtraHDROutBufsIfNeeded();
+
+ bool getRecordingHintValue();
+ uint32_t getJpegQuality();
+ uint32_t getRotation();
+ uint32_t getDeviceRotation();
+ uint32_t getJpegExifRotation();
+ bool useJpegExifRotation();
+ int32_t getEffectValue();
+ bool isInstantAECEnabled();
+ bool isInstantCaptureEnabled();
+ uint8_t getAecFrameBoundValue();
+ uint8_t getAecSkipDisplayFrameBound();
+
+ int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
+ int32_t getExifFocalLength(rat_t *focalLength);
+ uint16_t getExifIsoSpeed();
+ int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
+ uint32_t &count);
+ int32_t getExifLatitude(rat_t *latitude, char *latRef);
+ int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+ int32_t getExifAltitude(rat_t *altitude, char *altRef);
+ int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
+ uint32_t bufLen, rat_t *gpsTimeStamp);
+ bool isVideoBuffersCached();
+ int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+ bool isAEBracketEnabled();
+ int32_t setAEBracketing();
+ bool isFpsDebugEnabled();
+ bool isHistogramEnabled();
+ bool isSceneSelectionEnabled();
+ int32_t setSelectedScene(cam_scene_mode_type scene);
+ cam_scene_mode_type getSelectedScene();
+ bool isFaceDetectionEnabled();
+ int32_t setFaceDetectionOption(bool enabled);
+ int32_t setHistogram(bool enabled);
+ int32_t setFaceDetection(bool enabled, bool initCommit);
+ int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+ qcamera_thermal_mode getThermalMode();
+ int32_t updateRecordingHintValue(int32_t value);
+ int32_t setHDRAEBracket(cam_exp_bracketing_t hdrBracket);
+ bool isHDREnabled();
+ bool isAutoHDREnabled();
+ int32_t stopAEBracket();
+ int32_t updateRAW(cam_dimension_t max_dim);
+ bool isDISEnabled();
+ cam_is_type_t getISType();
+ cam_is_type_t getPreviewISType();
+ uint8_t getMobicatMask();
+
+ cam_focus_mode_type getFocusMode() const;
+ int32_t setNumOfSnapshot();
+ int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+ bool isJpegPictureFormat();
+ bool isNV16PictureFormat();
+ bool isNV21PictureFormat();
+ cam_denoise_process_type_t getDenoiseProcessPlate(cam_intf_parm_type_t type);
+ int32_t getMaxPicSize(cam_dimension_t &dim);
+ int getFlipMode(cam_stream_type_t streamType);
+ bool isSnapshotFDNeeded();
+
+ bool isHDR1xFrameEnabled();
+ bool isYUVFrameInfoNeeded();
+ const char *getFrameFmtString(cam_format_t fmt);
+ bool isHDR1xExtraBufferNeeded();
+ bool isHDROutputCropEnabled();
+
+ bool isPreviewFlipChanged();
+ bool isVideoFlipChanged();
+ bool isSnapshotFlipChanged();
+ void setHDRSceneEnable(bool bflag);
+ int32_t updateAWBParams(cam_awb_params_t &awb_params);
+
+ const char *getASDStateString(cam_auto_scene_t scene);
+ bool isHDRThumbnailProcessNeeded();
+ void setMinPpMask(cam_feature_mask_t min_pp_mask);
+ bool setStreamConfigure(bool isCapture,
+ bool previewAsPostview, bool resetConfig);
+ int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId,
+ int32_t device_rotation);
+ uint8_t getNumOfExtraBuffersForImageProc();
+ uint8_t getNumOfExtraBuffersForVideo();
+ uint8_t getNumOfExtraBuffersForPreview();
+ uint32_t getExifBufIndex(uint32_t captureIndex);
+ bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
+ bool isUbiFocusEnabled();
+ bool isChromaFlashEnabled();
+ bool isHighQualityNoiseReductionMode();
+ bool isTruePortraitEnabled();
+ size_t getTPMaxMetaSize();
+ bool isSeeMoreEnabled();
+ bool isStillMoreEnabled();
+ bool isOptiZoomEnabled();
+
+ int32_t commitAFBracket(cam_af_bracketing_t afBracket);
+ int32_t set3ALock(bool lock3A);
+ int32_t setAndCommitZoom(int zoom_level);
+ uint8_t getBurstCountForAdvancedCapture();
+ uint32_t getNumberInBufsForSingleShot();
+ uint32_t getNumberOutBufsForSingleShot();
+ int32_t setLongshotEnable(bool enable);
+ String8 dump();
+ bool isUbiRefocus();
+ uint32_t getRefocusMaxMetaSize();
+ uint8_t getRefocusOutputCount();
+ bool generateThumbFromMain();
+ void updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info);
+ void updateAEInfo(cam_3a_params_t &ae_params);
+ bool isDisplayFrameNeeded();
+ bool isAdvCamFeaturesEnabled();
+ int32_t setAecLock(const char *aecStr);
+ int32_t updateDebugLevel();
+ bool is4k2kVideoResolution();
+ bool isUBWCEnabled();
+
+ int getBrightness();
+ int32_t updateOisValue(bool oisValue);
+ int32_t setIntEvent(cam_int_evt_params_t params);
+ bool getofflineRAW();
+ bool getQuadraCfa();
+ int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
+ int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
+ int32_t getSharpness();
+ int32_t getEffect();
+ int32_t updateFlashMode(cam_flash_mode_t flash_mode);
+ int32_t configureAEBracketing(cam_capture_frame_config_t &frame_config);
+ int32_t configureHDRBracketing(cam_capture_frame_config_t &frame_config);
+ int32_t configFrameCapture(bool commitSettings);
+ int32_t resetFrameCapture(bool commitSettings);
+ cam_still_more_t getStillMoreSettings();
+ void setStillMoreSettings(cam_still_more_t stillmore_config);
+ cam_still_more_t getStillMoreCapability();
+ cam_dyn_img_data_t getDynamicImgData();
+ void setDynamicImgData(cam_dyn_img_data_t d);
+
+ int32_t getParmZoomLevel();
+ int8_t getReprocCount();
+ int8_t getCurPPCount();
+ void setReprocCount();
+ bool isPostProcScaling();
+ bool isLLNoiseEnabled();
+ void setCurPPCount(int8_t count);
+ int32_t setQuadraCfaMode(uint32_t value, bool initCommit);
+ int32_t setToneMapMode(uint32_t value, bool initCommit);
+ void setTintless(bool enable);
+ uint8_t getLongshotStages();
+ int8_t getBufBatchCount();
+ int8_t getVideoBatchSize();
+
+ int32_t setManualCaptureMode(
+ QCameraManualCaptureModes value = CAM_MANUAL_CAPTURE_TYPE_OFF);
+ QCameraManualCaptureModes getManualCaptureMode();
+ int64_t getExposureTime();
+
+ cam_capture_frame_config_t getCaptureFrameConfig();
+ void setJpegRotation(int rotation);
+ uint32_t getJpegRotation();
+
+ void setLowLightLevel(cam_low_light_mode_t value);
+ cam_low_light_mode_t getLowLightLevel();
+ bool getLowLightCapture();
+
+ /* Dual camera specific */
+ bool getDcrf();
+ int32_t setRelatedCamSyncInfo(
+ cam_sync_related_sensors_event_info_t* info);
+ const cam_sync_related_sensors_event_info_t*
+ getRelatedCamSyncInfo(void);
+ int32_t setFrameSyncEnabled(bool enable);
+ bool isFrameSyncEnabled(void);
+ int32_t getRelatedCamCalibration(
+ cam_related_system_calibration_data_t* calib);
+ int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
+ uint8_t fdModeInVideo();
+ bool isOEMFeatEnabled();
+
+ int32_t setZslMode(bool value);
+ int32_t updateZSLModeValue(bool value);
+
+ bool isReprocScaleEnabled();
+ bool isUnderReprocScaling();
+ int32_t getPicSizeFromAPK(int &width, int &height);
+
+ int32_t checkFeatureConcurrency();
+ int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+ int32_t getAnalysisInfo(
+ bool fdVideoEnabled,
+ bool hal3,
+ cam_feature_mask_t featureMask,
+ cam_analysis_info_t *pAnalysisInfo);
+private:
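+ // Pimpl: all parameter state lives in QCameraParameters (mImpl); mLock
+ // serializes every call that this interface forwards to it.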
+ QCameraParameters *mImpl;
+ mutable Mutex mLock;
+};
+
+}; // namespace qcamera
+
+#endif
diff --git a/camera/QCamera2/HAL/QCameraPostProc.cpp b/camera/QCamera2/HAL/QCameraPostProc.cpp
new file mode 100644
index 0000000..c14b0d9
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraPostProc.cpp
@@ -0,0 +1,3661 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraPostProc"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraPostProc.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
+
+#define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt) \
+ int jpeg_bufs; \
+ for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++) { \
+ if (ptr[jpeg_bufs] != NULL) { \
+ free(ptr[jpeg_bufs]); \
+ ptr[jpeg_bufs] = NULL; \
+ } \
+ }
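A minimal standalone sketch of the cleanup pattern that FREE_JPEG_OUTPUT_BUFFER expands to: free every non-NULL slot of a pointer array and reset it. The helper name and buffer count below are illustrative stand-ins, not part of this patch.

#include <cstddef>
#include <cstdlib>

// Illustrative helper (assumed name): mirrors FREE_JPEG_OUTPUT_BUFFER by
// freeing each non-NULL entry and NULLing the slot so a second pass is a no-op.
static void free_output_buffers(void *bufs[], size_t count) {
    for (size_t i = 0; i < count; i++) {
        if (bufs[i] != NULL) {
            free(bufs[i]);
            bufs[i] = NULL;
        }
    }
}

int main() {
    void *jpeg_out[4] = { malloc(16), NULL, malloc(32), NULL };
    free_output_buffers(jpeg_out, 4);   // safe on NULL slots, idempotent
    return 0;
}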
+
+/*===========================================================================
+ * FUNCTION : QCameraPostProcessor
+ *
+ * DESCRIPTION: constructor of QCameraPostProcessor.
+ *
+ * PARAMETERS :
+ * @cam_ctrl : ptr to HWI object
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
+ : m_parent(cam_ctrl),
+ mJpegCB(NULL),
+ mJpegUserData(NULL),
+ mJpegClientHandle(0),
+ mJpegSessionId(0),
+ m_pJpegExifObj(NULL),
+ m_bThumbnailNeeded(TRUE),
+ mPPChannelCount(0),
+ m_bInited(FALSE),
+ m_inputPPQ(releaseOngoingPPData, this),
+ m_ongoingPPQ(releaseOngoingPPData, this),
+ m_inputJpegQ(releaseJpegData, this),
+ m_ongoingJpegQ(releaseJpegData, this),
+ m_inputRawQ(releaseRawData, this),
+ mSaveFrmCnt(0),
+ mUseSaveProc(false),
+ mUseJpegBurst(false),
+ mJpegMemOpt(true),
+ m_JpegOutputMemCount(0),
+ mNewJpegSessionNeeded(true),
+ m_bufCountPPQ(0),
+ m_PPindex(0)
+{
+ memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+ memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+ memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
+ memset(mPPChannels, 0, sizeof(mPPChannels));
+ m_DataMem = NULL;
+ mOfflineDataBufs = NULL;
+ pthread_mutex_init(&m_reprocess_lock,NULL);
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraPostProcessor
+ *
+ * DESCRIPTION: destructor of QCameraPostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraPostProcessor::~QCameraPostProcessor()
+{
+ FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);
+ if (m_pJpegExifObj != NULL) {
+ delete m_pJpegExifObj;
+ m_pJpegExifObj = NULL;
+ }
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ QCameraChannel *pChannel = mPPChannels[i];
+ if ( pChannel != NULL ) {
+ pChannel->stop();
+ delete pChannel;
+ pChannel = NULL;
+ }
+ }
+ mPPChannelCount = 0;
+ pthread_mutex_destroy(&m_reprocess_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : setJpegHandle
+ *
+ * DESCRIPTION: set JPEG client handles
+ *
+ * PARAMETERS :
+ * @pJpegHandle : JPEG ops handle
+ * @pJpegMpoHandle : MPO JPEG ops handle
+ * @clientHandle : JPEG client handle
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
+ mm_jpeg_mpo_ops_t *pJpegMpoHandle, uint32_t clientHandle)
+{
+ LOGH("E mJpegClientHandle: %d, clientHandle: %d",
+ mJpegClientHandle, clientHandle);
+
+ if(pJpegHandle) {
+ memcpy(&mJpegHandle, pJpegHandle, sizeof(mm_jpeg_ops_t));
+ }
+
+ if(pJpegMpoHandle) {
+ memcpy(&mJpegMpoHandle, pJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
+ }
+ mJpegClientHandle = clientHandle;
+ LOGH("X mJpegClientHandle: %d, clientHandle: %d",
+ mJpegClientHandle, clientHandle);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ * @jpeg_cb : callback to handle jpeg event from mm-camera-interface
+ * @user_data : user data ptr for jpeg callback
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
+{
+ mJpegCB = jpeg_cb;
+ mJpegUserData = user_data;
+ m_dataProcTh.launch(dataProcessRoutine, this);
+ m_saveProcTh.launch(dataSaveRoutine, this);
+ m_parent->mParameters.setReprocCount();
+ m_bInited = TRUE;
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::deinit()
+{
+ if (m_bInited == TRUE) {
+ m_dataProcTh.exit();
+ m_saveProcTh.exit();
+ m_bInited = FALSE;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : start
+ *
+ * DESCRIPTION: start postprocessor. Data process thread and data notify thread
+ * will be launched.
+ *
+ * PARAMETERS :
+ * @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : if any reprocess is needed, a reprocess channel/stream
+ * will be started.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
+{
+ char prop[PROPERTY_VALUE_MAX];
+ int32_t rc = NO_ERROR;
+ QCameraChannel *pInputChannel = pSrcChannel;
+
+ LOGH("E ");
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ if (m_DataMem != NULL) {
+ m_DataMem->release(m_DataMem);
+ m_DataMem = NULL;
+ }
+
+ if (pInputChannel == NULL) {
+ LOGE("Input Channel for pproc is NULL.");
+ return UNKNOWN_ERROR;
+ }
+
+ if ( m_parent->needReprocess() ) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ // Delete previous reproc channel
+ QCameraReprocessChannel *pChannel = mPPChannels[i];
+ if (pChannel != NULL) {
+ pChannel->stop();
+ delete pChannel;
+ pChannel = NULL;
+ }
+ }
+ mPPChannelCount = 0;
+
+ m_bufCountPPQ = 0;
+ if (!m_parent->isLongshotEnabled()) {
+ m_parent->mParameters.setReprocCount();
+ }
+
+ if (m_parent->mParameters.getManualCaptureMode() >=
+ CAM_MANUAL_CAPTURE_TYPE_3) {
+ mPPChannelCount = m_parent->mParameters.getReprocCount() - 1;
+ } else {
+ mPPChannelCount = m_parent->mParameters.getReprocCount();
+ }
+
+ // Create all reproc channels and start channel
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ mPPChannels[i] = m_parent->addReprocChannel(pInputChannel, i);
+ if (mPPChannels[i] == NULL) {
+ LOGE("cannot add multi reprocess channel i = %d", i);
+ return UNKNOWN_ERROR;
+ }
+ rc = mPPChannels[i]->start();
+ if (rc != 0) {
+ LOGE("cannot start multi reprocess channel i = %d", i);
+ delete mPPChannels[i];
+ mPPChannels[i] = NULL;
+ return UNKNOWN_ERROR;
+ }
+ pInputChannel = static_cast<QCameraChannel *>(mPPChannels[i]);
+ }
+ }
+
+ property_get("persist.camera.longshot.save", prop, "0");
+ mUseSaveProc = atoi(prop) > 0 ? true : false;
+
+ m_PPindex = 0;
+ m_InputMetadata.clear();
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
+ m_parent->m_cbNotifier.startSnapshots();
+ LOGH("X rc = %d", rc);
+ return rc;
+}
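The reprocess channel cascade built in start() reduces to the following standalone sketch: each post-processing stage is created with the previous stage as its input, so pInputChannel walks down the chain as channels are added. Stage is a hypothetical stand-in, not a HAL type.

#include <cstdio>
#include <memory>
#include <vector>

// Hypothetical stand-in for a reprocess channel: it only records which
// stage feeds it, mirroring how addReprocChannel() takes pInputChannel.
struct Stage {
    int index;
    const Stage *source;   // previous stage in the cascade (NULL = capture channel)
};

int main() {
    const int ppChannelCount = 3;             // e.g. what getReprocCount() would return
    std::vector<std::unique_ptr<Stage>> chain;
    const Stage *input = NULL;                // initially the source (capture) channel

    for (int i = 0; i < ppChannelCount; i++) {
        chain.emplace_back(new Stage{ i, input });
        input = chain.back().get();           // next stage consumes this one's output
    }

    for (const auto &s : chain)
        std::printf("stage %d fed by %d\n", s->index, s->source ? s->source->index : -1);
    return 0;
}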
+
+/*===========================================================================
+ * FUNCTION : stop
+ *
+ * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stop()
+{
+ if (m_bInited == TRUE) {
+ m_parent->m_cbNotifier.stopSnapshots();
+
+ if (m_DataMem != NULL) {
+ m_DataMem->release(m_DataMem);
+ m_DataMem = NULL;
+ }
+
+ // dataProc thread needs to process "stop" as a sync call because aborting the jpeg job must be synchronous
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+ }
+ // stop reproc channel if exists
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ QCameraReprocessChannel *pChannel = mPPChannels[i];
+ if (pChannel != NULL) {
+ pChannel->stop();
+ delete pChannel;
+ pChannel = NULL;
+ }
+ }
+ mPPChannelCount = 0;
+ m_PPindex = 0;
+ m_InputMetadata.clear();
+
+ if (mOfflineDataBufs != NULL) {
+ mOfflineDataBufs->deallocate();
+ delete mOfflineDataBufs;
+ mOfflineDataBufs = NULL;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : createJpegSession
+ *
+ * DESCRIPTION: start JPEG session in parallel with reprocess to reduce the KPI
+ *
+ * PARAMETERS :
+ * @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
+{
+ int32_t rc = NO_ERROR;
+
+ LOGH("E ");
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ if (pSrcChannel == NULL) {
+ LOGE("Input Channel for pproc is NULL.");
+ return UNKNOWN_ERROR;
+ }
+
+ if (mPPChannelCount > 0) {
+ QCameraChannel *pChannel = NULL;
+ int ppChannel_idx = mPPChannelCount - 1;
+ pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
+ pSrcChannel;
+ QCameraStream *pSnapshotStream = NULL;
+ QCameraStream *pThumbStream = NULL;
+ bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
+ (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
+ m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
+ !m_parent->mParameters.generateThumbFromMain());
+
+ if (pChannel == NULL) {
+ LOGE("Input Channel for pproc is NULL for index %d.",
+ ppChannel_idx);
+ return UNKNOWN_ERROR;
+ }
+
+ for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
+ QCameraStream *pStream = pChannel->getStreamByIndex(i);
+
+ if ( NULL == pStream ) {
+ break;
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ pSnapshotStream = pStream;
+ }
+
+ if ((thumb_stream_needed) &&
+ (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+ pThumbStream = pStream;
+ }
+ }
+
+ // If thumbnail is not part of the reprocess channel, then
+ // try to get it from the source channel
+ if ((thumb_stream_needed) && (NULL == pThumbStream) &&
+ (pChannel == mPPChannels[ppChannel_idx])) {
+ for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
+ QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
+
+ if ( NULL == pStream ) {
+ break;
+ }
+
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+ pThumbStream = pStream;
+ }
+ }
+ }
+
+ if ( NULL != pSnapshotStream ) {
+ mm_jpeg_encode_params_t encodeParam;
+ memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+ rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
+ if (rc != NO_ERROR) {
+ LOGE("error getting encoding config");
+ return rc;
+ }
+ LOGH("[KPI Perf] : call jpeg create_session");
+
+ rc = mJpegHandle.create_session(mJpegClientHandle,
+ &encodeParam,
+ &mJpegSessionId);
+ if (rc != NO_ERROR) {
+ LOGE("error creating a new jpeg encoding session");
+ return rc;
+ }
+ mNewJpegSessionNeeded = false;
+ }
+ }
+ LOGH("X ");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegEncodingConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ * @encode_parm : param to be filled with encoding configuration
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+ QCameraStream *main_stream,
+ QCameraStream *thumb_stream)
+{
+ LOGD("E");
+ int32_t ret = NO_ERROR;
+ size_t out_size;
+
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.jpeg_burst", prop, "0");
+ mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
+ encode_parm.burst_mode = mUseJpegBurst;
+
+ cam_rect_t crop;
+ memset(&crop, 0, sizeof(cam_rect_t));
+ main_stream->getCropInfo(crop);
+
+ cam_dimension_t src_dim, dst_dim;
+ memset(&src_dim, 0, sizeof(cam_dimension_t));
+ memset(&dst_dim, 0, sizeof(cam_dimension_t));
+ main_stream->getFrameDimension(src_dim);
+
+ bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+ if (hdr_output_crop && crop.height) {
+ dst_dim.height = crop.height;
+ } else {
+ dst_dim.height = src_dim.height;
+ }
+ if (hdr_output_crop && crop.width) {
+ dst_dim.width = crop.width;
+ } else {
+ dst_dim.width = src_dim.width;
+ }
+
+ // set rotation only when no online rotation or offline pp rotation is done before
+ if (!m_parent->needRotationReprocess()) {
+ encode_parm.rotation = m_parent->mParameters.getJpegRotation();
+ }
+
+ encode_parm.main_dim.src_dim = src_dim;
+ encode_parm.main_dim.dst_dim = dst_dim;
+
+ m_dst_dim = dst_dim;
+
+ encode_parm.jpeg_cb = mJpegCB;
+ encode_parm.userdata = mJpegUserData;
+
+ m_bThumbnailNeeded = TRUE; // need to encode thumbnail by default
+ // System property to disable thumbnail encoding in order to reduce power.
+ // Thumbnail encoding is enabled by default; set this property explicitly
+ // to disable it.
+ property_get("persist.camera.tn.disable", prop, "0");
+ if (atoi(prop) == 1) {
+ m_bThumbnailNeeded = FALSE;
+ LOGH("m_bThumbnailNeeded is %d", m_bThumbnailNeeded);
+ }
+ cam_dimension_t thumbnailSize;
+ memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
+ m_parent->getThumbnailSize(thumbnailSize);
+ if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
+ // (0,0) means no thumbnail
+ m_bThumbnailNeeded = FALSE;
+ }
+ encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+ // get color format
+ cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
+ main_stream->getFormat(img_fmt);
+ encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+ // get jpeg quality
+ uint32_t val = m_parent->getJpegQuality();
+ if (0U < val) {
+ encode_parm.quality = val;
+ } else {
+ LOGH("Using default JPEG quality");
+ encode_parm.quality = 85;
+ }
+ cam_frame_len_offset_t main_offset;
+ memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+ main_stream->getFrameOffset(main_offset);
+
+ // src buf config
+ QCameraMemory *pStreamMem = main_stream->getStreamBufs();
+ if (pStreamMem == NULL) {
+ LOGE("cannot get stream bufs from main stream");
+ ret = BAD_VALUE;
+ goto on_error;
+ }
+ encode_parm.num_src_bufs = pStreamMem->getCnt();
+ for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+ camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+ if (stream_mem != NULL) {
+ encode_parm.src_main_buf[i].index = i;
+ encode_parm.src_main_buf[i].buf_size = stream_mem->size;
+ encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+ encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+ encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+ encode_parm.src_main_buf[i].offset = main_offset;
+ }
+ }
+ LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
+ "src_dim = %dX%d dst_dim = %dX%d",
+ encode_parm.num_src_bufs,
+ main_offset.mp[0].width, main_offset.mp[0].height,
+ main_offset.frame_len, encode_parm.rotation,
+ src_dim.width, src_dim.height,
+ dst_dim.width, dst_dim.height);
+
+ if (m_bThumbnailNeeded == TRUE) {
+ m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
+
+ if (thumb_stream == NULL) {
+ thumb_stream = main_stream;
+ }
+ if (((90 == m_parent->mParameters.getJpegRotation())
+ || (270 == m_parent->mParameters.getJpegRotation()))
+ && (m_parent->needRotationReprocess())) {
+ // swap thumbnail dimensions
+ cam_dimension_t tmp_dim = encode_parm.thumb_dim.dst_dim;
+ encode_parm.thumb_dim.dst_dim.width = tmp_dim.height;
+ encode_parm.thumb_dim.dst_dim.height = tmp_dim.width;
+ }
+ pStreamMem = thumb_stream->getStreamBufs();
+ if (pStreamMem == NULL) {
+ LOGE("cannot get stream bufs from thumb stream");
+ ret = BAD_VALUE;
+ goto on_error;
+ }
+ cam_frame_len_offset_t thumb_offset;
+ memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+ thumb_stream->getFrameOffset(thumb_offset);
+ encode_parm.num_tmb_bufs = pStreamMem->getCnt();
+ for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
+ camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+ if (stream_mem != NULL) {
+ encode_parm.src_thumb_buf[i].index = i;
+ encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
+ encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+ encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+ encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+ encode_parm.src_thumb_buf[i].offset = thumb_offset;
+ }
+ }
+ cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
+ thumb_stream->getFormat(img_fmt_thumb);
+ encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
+
+ // crop is the same if frame is the same
+ if (thumb_stream != main_stream) {
+ memset(&crop, 0, sizeof(cam_rect_t));
+ thumb_stream->getCropInfo(crop);
+ }
+
+ memset(&src_dim, 0, sizeof(cam_dimension_t));
+ thumb_stream->getFrameDimension(src_dim);
+ encode_parm.thumb_dim.src_dim = src_dim;
+
+ if (!m_parent->needRotationReprocess()) {
+ encode_parm.thumb_rotation = m_parent->mParameters.getJpegRotation();
+ }
+ encode_parm.thumb_dim.crop = crop;
+ encode_parm.thumb_from_postview =
+ !m_parent->mParameters.generateThumbFromMain() &&
+ (img_fmt_thumb != CAM_FORMAT_YUV_420_NV12_UBWC) &&
+ (m_parent->mParameters.useJpegExifRotation() ||
+ m_parent->mParameters.getJpegRotation() == 0);
+ LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
+ "src_dim = %dX%d, dst_dim = %dX%d",
+ encode_parm.num_tmb_bufs,
+ thumb_offset.mp[0].width, thumb_offset.mp[0].height,
+ thumb_offset.frame_len, encode_parm.thumb_rotation,
+ encode_parm.thumb_dim.src_dim.width,
+ encode_parm.thumb_dim.src_dim.height,
+ encode_parm.thumb_dim.dst_dim.width,
+ encode_parm.thumb_dim.dst_dim.height);
+ }
+
+ if (m_parent->mParameters.useJpegExifRotation()){
+ encode_parm.thumb_rotation = m_parent->mParameters.getJpegExifRotation();
+ }
+
+ encode_parm.num_dst_bufs = 1;
+ if (mUseJpegBurst) {
+ encode_parm.num_dst_bufs = MAX_JPEG_BURST;
+ }
+ encode_parm.get_memory = NULL;
+ out_size = main_offset.frame_len;
+ if (mJpegMemOpt) {
+ encode_parm.get_memory = getJpegMemory;
+ encode_parm.put_memory = releaseJpegMemory;
+ out_size = sizeof(omx_jpeg_ouput_buf_t);
+ encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
+ }
+ m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
+ for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
+ if (m_pJpegOutputMem[i] != NULL)
+ free(m_pJpegOutputMem[i]);
+ omx_jpeg_ouput_buf_t omx_out_buf;
+ memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
+ omx_out_buf.handle = this;
+ // allocate output buf for jpeg encoding
+ m_pJpegOutputMem[i] = malloc(out_size);
+
+ if (NULL == m_pJpegOutputMem[i]) {
+ ret = NO_MEMORY;
+ LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
+ goto on_error;
+ }
+
+ if (mJpegMemOpt) {
+ memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
+ }
+
+ encode_parm.dest_buf[i].index = i;
+ encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
+ encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
+ encode_parm.dest_buf[i].fd = -1;
+ encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
+ encode_parm.dest_buf[i].offset = main_offset;
+ }
+
+ LOGD("X");
+ return NO_ERROR;
+
+on_error:
+ FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
+
+ LOGD("X with error %d", ret);
+ return ret;
+}
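A minimal sketch of two size decisions made above: the JPEG destination dimension falls back to the source dimension unless HDR output crop supplies a non-zero crop, and the thumbnail destination is swapped when a 90/270 rotation will be applied during reprocess. The types and function names below are illustrative only, not the camera interface definitions.

#include <cstdio>

struct Dim  { int width; int height; };                      // stand-in for cam_dimension_t
struct Rect { int left; int top; int width; int height; };   // stand-in for cam_rect_t

// Pick JPEG dst dims: prefer the HDR crop when enabled and non-zero.
static Dim pick_dst_dim(Dim src, Rect crop, bool hdr_output_crop) {
    Dim dst = src;
    if (hdr_output_crop && crop.height) dst.height = crop.height;
    if (hdr_output_crop && crop.width)  dst.width  = crop.width;
    return dst;
}

// Swap thumbnail dims when rotation is handled by reprocess at 90/270.
static Dim thumb_dst_dim(Dim requested, int jpeg_rotation, bool rotation_reprocess) {
    if (rotation_reprocess && (jpeg_rotation == 90 || jpeg_rotation == 270)) {
        Dim swapped = { requested.height, requested.width };
        return swapped;
    }
    return requested;
}

int main() {
    Dim src = { 4000, 3000 };
    Rect crop = { 0, 0, 3840, 2160 };
    Dim dst = pick_dst_dim(src, crop, /*hdr_output_crop=*/true);
    Dim thumb = thumb_dst_dim({ 320, 240 }, 90, /*rotation_reprocess=*/true);
    std::printf("dst %dx%d thumb %dx%d\n", dst.width, dst.height, thumb.width, thumb.height);
    return 0;
}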
+
+/*===========================================================================
+ * FUNCTION : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify through notify callback registered by upper layer
+ *
+ * PARAMETERS :
+ * @msg_type: msg type of notify
+ * @ext1 : extension
+ * @ext2 : extension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
+ int32_t ext1,
+ int32_t ext2)
+{
+ return m_parent->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION : sendDataNotify
+ *
+ * DESCRIPTION: enqueue data into dataNotify thread
+ *
+ * PARAMETERS :
+ * @msg_type: data callback msg type
+ * @data : ptr to data memory struct
+ * @index : index to data buffer
+ * @metadata: ptr to meta data buffer if there is any
+ * @release_data : ptr to struct indicating if data need to be released
+ * after notify
+ * @super_buf_frame_idx : super buffer frame index
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
+ camera_memory_t *data,
+ uint8_t index,
+ camera_frame_metadata_t *metadata,
+ qcamera_release_data_t *release_data,
+ uint32_t super_buf_frame_idx)
+{
+ qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
+ if (NULL == data_cb) {
+ LOGE("no mem for acamera_data_argm_t");
+ return NO_MEMORY;
+ }
+ memset(data_cb, 0, sizeof(qcamera_data_argm_t));
+ data_cb->msg_type = msg_type;
+ data_cb->data = data;
+ data_cb->index = index;
+ data_cb->metadata = metadata;
+ if (release_data != NULL) {
+ data_cb->release_data = *release_data;
+ }
+
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
+ cbArg.msg_type = msg_type;
+ cbArg.data = data;
+ cbArg.metadata = metadata;
+ cbArg.user_data = data_cb;
+ cbArg.cookie = this;
+ cbArg.release_cb = releaseNotifyData;
+ cbArg.frame_index = super_buf_frame_idx;
+ int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
+ if ( NO_ERROR != rc ) {
+ LOGE("Error enqueuing jpeg data into notify queue");
+ releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
+ return UNKNOWN_ERROR;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : validatePostProcess
+ *
+ * DESCRIPTION: Verify output buffer count of pp module
+ *
+ * PARAMETERS :
+ * @frame : process frame received from mm-camera-interface
+ *
+ * RETURN : bool type of status
+ * TRUE -- success
+ * FALSE failure
+ *==========================================================================*/
+bool QCameraPostProcessor::validatePostProcess(mm_camera_super_buf_t *frame)
+{
+ bool status = TRUE;
+ QCameraChannel *pChannel = NULL;
+ QCameraReprocessChannel *m_pReprocChannel = NULL;
+
+ if (frame == NULL) {
+ return status;
+ }
+
+ pChannel = m_parent->getChannelByHandle(frame->ch_id);
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if (pChannel == mPPChannels[i]->getSrcChannel()) {
+ m_pReprocChannel = mPPChannels[i];
+ break;
+ }
+ }
+
+ if ((m_pReprocChannel != NULL) && (pChannel == m_pReprocChannel->getSrcChannel())) {
+ QCameraStream *pStream = NULL;
+ for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
+ pStream = m_pReprocChannel->getStreamByIndex(i);
+ if (pStream && (m_inputPPQ.getCurrentSize() > 0) &&
+ (pStream->getNumQueuedBuf() <= 0)) {
+ LOGW("Out of PP Buffer PPQ = %d ongoingQ = %d Jpeg = %d onJpeg = %d",
+ m_inputPPQ.getCurrentSize(), m_ongoingPPQ.getCurrentSize(),
+ m_inputJpegQ.getCurrentSize(), m_ongoingJpegQ.getCurrentSize());
+ status = FALSE;
+ break;
+ }
+ }
+ }
+ return status;
+}
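The check above reduces to a small predicate: post-processing is viable only if, whenever the PP input queue has pending work, every reprocess stream still has buffers queued with the kernel. A standalone sketch, with plain integers standing in for the queue and stream objects:

#include <cstdio>
#include <vector>

// Returns false when work is pending but some stream has no queued buffers,
// mirroring the "Out of PP Buffer" condition in validatePostProcess().
static bool can_post_process(int pending_pp_jobs, const std::vector<int> &queued_bufs_per_stream) {
    if (pending_pp_jobs <= 0)
        return true;                     // nothing waiting, nothing to starve
    for (int queued : queued_bufs_per_stream)
        if (queued <= 0)
            return false;                // a reprocess stream ran out of buffers
    return true;
}

int main() {
    std::printf("%d\n", can_post_process(2, { 3, 1 }));   // 1: buffers available
    std::printf("%d\n", can_post_process(2, { 3, 0 }));   // 0: out of PP buffers
    return 0;
}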
+
+/*===========================================================================
+ * FUNCTION : getOfflinePPInputBuffer
+ *
+ * DESCRIPTION: Function to generate offline post proc buffer
+ *
+ * PARAMETERS :
+ * @src_frame : process frame received from mm-camera-interface
+ *
+ * RETURN : Buffer pointer if successful
+ * : NULL in case of failure
+ *==========================================================================*/
+mm_camera_buf_def_t *QCameraPostProcessor::getOfflinePPInputBuffer(
+ mm_camera_super_buf_t *src_frame)
+{
+ mm_camera_buf_def_t *mBufDefs = NULL;
+ QCameraChannel *pChannel = NULL;
+ QCameraStream *src_pStream = NULL;
+ mm_camera_buf_def_t *data_frame = NULL;
+ mm_camera_buf_def_t *meta_frame = NULL;
+
+ if (mOfflineDataBufs == NULL) {
+ LOGE("Offline Buffer not allocated");
+ return NULL;
+ }
+
+ uint32_t num_bufs = mOfflineDataBufs->getCnt();
+ size_t bufDefsSize = num_bufs * sizeof(mm_camera_buf_def_t);
+ mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
+ if (mBufDefs == NULL) {
+ LOGE("No memory");
+ return NULL;
+ }
+ memset(mBufDefs, 0, bufDefsSize);
+
+ pChannel = m_parent->getChannelByHandle(src_frame->ch_id);
+ for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
+ src_pStream = pChannel->getStreamByHandle(
+ src_frame->bufs[i]->stream_id);
+ if (src_pStream != NULL) {
+ if (src_pStream->getMyType() == CAM_STREAM_TYPE_RAW) {
+ LOGH("Found RAW input stream");
+ data_frame = src_frame->bufs[i];
+ } else if (src_pStream->getMyType() == CAM_STREAM_TYPE_METADATA){
+ LOGH("Found Metada input stream");
+ meta_frame = src_frame->bufs[i];
+ }
+ }
+ }
+
+ if ((src_pStream != NULL) && (data_frame != NULL)) {
+ cam_frame_len_offset_t offset;
+ memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+ src_pStream->getFrameOffset(offset);
+ for (uint32_t i = 0; i < num_bufs; i++) {
+ mBufDefs[i] = *data_frame;
+ mOfflineDataBufs->getBufDef(offset, mBufDefs[i], i);
+
+ LOGD("Dumping RAW data on offline buffer");
+ /*Actual data memcpy just for verification*/
+ memcpy(mBufDefs[i].buffer, data_frame->buffer,
+ mBufDefs[i].frame_len);
+ }
+ releaseSuperBuf(src_frame, CAM_STREAM_TYPE_RAW);
+ } else {
+ free(mBufDefs);
+ mBufDefs = NULL;
+ }
+
+ LOGH("mBufDefs = %p", mBufDefs);
+ return mBufDefs;
+}
+
+/*===========================================================================
+ * FUNCTION : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ * @frame : process frame received from mm-camera-interface
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : depending on whether offline reprocess is needed, the received frame
+ * will be sent to either the postprocess input queue or jpeg encoding
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
+{
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ if (frame == NULL) {
+ LOGE("Invalid parameter");
+ return UNKNOWN_ERROR;
+ }
+
+ mm_camera_buf_def_t *meta_frame = NULL;
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ // look through input superbuf
+ if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+ meta_frame = frame->bufs[i];
+ break;
+ }
+ }
+ if (meta_frame != NULL) {
+ // Update metadata for frame-based parameters
+ m_parent->updateMetadata((metadata_buffer_t *)meta_frame->buffer);
+ }
+
+ if (m_parent->needReprocess()) {
+ if ((!m_parent->isLongshotEnabled() &&
+ !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
+ (m_parent->isLongshotEnabled() &&
+ m_parent->isCaptureShutterEnabled())) {
+ //play shutter sound
+ m_parent->playShutter();
+ }
+
+ ATRACE_INT("Camera:Reprocess", 1);
+ LOGH("need reprocess");
+
+ // enqueue to post proc input queue
+ qcamera_pp_data_t *pp_request_job =
+ (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
+ if (pp_request_job == NULL) {
+ LOGE("No memory for pproc job");
+ return NO_MEMORY;
+ }
+ memset(pp_request_job, 0, sizeof(qcamera_pp_data_t));
+ pp_request_job->src_frame = frame;
+ pp_request_job->src_reproc_frame = frame;
+ pp_request_job->reprocCount = 0;
+ pp_request_job->ppChannelIndex = 0;
+
+ if ((NULL != frame) &&
+ (0 < frame->num_bufs)
+ && (m_parent->isRegularCapture())) {
+ /*Regular capture. Source stream will be deleted*/
+ mm_camera_buf_def_t *bufs = NULL;
+ uint32_t num_bufs = frame->num_bufs;
+ bufs = new mm_camera_buf_def_t[num_bufs];
+ if (NULL == bufs) {
+ LOGE("Unable to allocate cached buffers");
+ return NO_MEMORY;
+ }
+
+ for (uint32_t i = 0; i < num_bufs; i++) {
+ bufs[i] = *frame->bufs[i];
+ frame->bufs[i] = &bufs[i];
+ }
+ pp_request_job->src_reproc_bufs = bufs;
+
+ // Don't release source frame after encoding
+ // at this point the source channel will not exist.
+ pp_request_job->reproc_frame_release = true;
+ }
+
+ if (mOfflineDataBufs != NULL) {
+ pp_request_job->offline_reproc_buf =
+ getOfflinePPInputBuffer(frame);
+ if (pp_request_job->offline_reproc_buf != NULL) {
+ pp_request_job->offline_buffer = true;
+ }
+ }
+
+ if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
+ LOGW("Input PP Q is not active!!!");
+ releaseSuperBuf(frame);
+ free(frame);
+ free(pp_request_job);
+ frame = NULL;
+ pp_request_job = NULL;
+ return NO_ERROR;
+ }
+ if (m_parent->mParameters.isAdvCamFeaturesEnabled()
+ && (meta_frame != NULL)) {
+ m_InputMetadata.add(meta_frame);
+ }
+ } else if (m_parent->mParameters.isNV16PictureFormat() ||
+ m_parent->mParameters.isNV21PictureFormat()) {
+ //check if raw frame information is needed.
+ if(m_parent->mParameters.isYUVFrameInfoNeeded())
+ setYUVFrameInfo(frame);
+
+ processRawData(frame);
+ } else {
+ //play shutter sound
+ if(!m_parent->m_stateMachine.isNonZSLCaptureRunning() &&
+ !m_parent->mLongshotEnabled)
+ m_parent->playShutter();
+
+ LOGH("no need offline reprocess, sending to jpeg encoding");
+ qcamera_jpeg_data_t *jpeg_job =
+ (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+ if (jpeg_job == NULL) {
+ LOGE("No memory for jpeg job");
+ return NO_MEMORY;
+ }
+
+ memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+ jpeg_job->src_frame = frame;
+
+ if (meta_frame != NULL) {
+ // fill in meta data frame ptr
+ jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
+ }
+
+ // enqueue to jpeg input queue
+ if (!m_inputJpegQ.enqueue((void *)jpeg_job)) {
+ LOGW("Input Jpeg Q is not active!!!");
+ releaseJpegJobData(jpeg_job);
+ free(jpeg_job);
+ jpeg_job = NULL;
+ return NO_ERROR;
+ }
+ }
+
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ return NO_ERROR;
+}
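processData() follows a common handoff pattern: allocate a job, try to enqueue it on the worker's input queue, release it immediately if the queue is not active, and otherwise poke the worker thread (the CAMERA_CMD_TYPE_DO_NEXT_JOB command). A minimal std::thread sketch of that pattern, with hypothetical names and no HAL types:

#include <condition_variable>
#include <cstdio>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

struct Job { int frame_id; };                     // stand-in for qcamera_pp_data_t

static std::mutex m;
static std::condition_variable cv;
static std::queue<std::unique_ptr<Job>> inputQ;
static bool queueActive = true;                   // mirrors the "Q is not active" check
static bool done = false;

// Enqueue if active; caller keeps ownership (and must release) on failure.
static bool enqueue(std::unique_ptr<Job> &job) {
    std::lock_guard<std::mutex> l(m);
    if (!queueActive) return false;
    inputQ.push(std::move(job));
    cv.notify_one();                              // "DO_NEXT_JOB" style wake-up
    return true;
}

static void worker() {
    std::unique_lock<std::mutex> l(m);
    while (!done || !inputQ.empty()) {
        cv.wait(l, [] { return done || !inputQ.empty(); });
        while (!inputQ.empty()) {
            std::unique_ptr<Job> j = std::move(inputQ.front());
            inputQ.pop();
            std::printf("processing frame %d\n", j->frame_id);
        }
    }
}

int main() {
    std::thread t(worker);
    for (int i = 0; i < 3; i++) {
        std::unique_ptr<Job> j(new Job{ i });
        if (!enqueue(j)) { /* queue inactive: unique_ptr still owns and frees the job */ }
    }
    { std::lock_guard<std::mutex> l(m); done = true; }
    cv.notify_one();
    t.join();
    return 0;
}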
+
+/*===========================================================================
+ * FUNCTION : processRawData
+ *
+ * DESCRIPTION: enqueue raw data into dataProc thread
+ *
+ * PARAMETERS :
+ * @frame : process frame received from mm-camera-interface
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
+{
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ // enqueue to raw input queue
+ if (m_inputRawQ.enqueue((void *)frame)) {
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ } else {
+ LOGW("m_inputRawQ is not active!!!");
+ releaseSuperBuf(frame);
+ free(frame);
+ frame = NULL;
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : processJpegEvt
+ *
+ * DESCRIPTION: process jpeg event from mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ * @evt : payload of jpeg event, including information about jpeg encoding
+ * status, jpeg size and so on.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : This event will also trigger DataProc thread to move to next job
+ * processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
+ * if there is any pending job in jpeg input queue)
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
+{
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t rc = NO_ERROR;
+ camera_memory_t *jpeg_mem = NULL;
+ omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+ void *jpegData = NULL;
+ if (mUseSaveProc && m_parent->isLongshotEnabled()) {
+ qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
+ if ( NULL == saveData ) {
+ LOGE("Can not allocate save data message!");
+ return NO_MEMORY;
+ }
+ *saveData = *evt;
+ if (m_inputSaveQ.enqueue((void *) saveData)) {
+ m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ } else {
+ LOGD("m_inputSaveQ PP Q is not active!!!");
+ free(saveData);
+ saveData = NULL;
+ return rc;
+ }
+ } else {
+ /* To be removed later when ISP Frame sync feature is available
+ qcamera_jpeg_data_t *jpeg_job =
+ (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue(matchJobId,
+ (void*)&evt->jobId);
+ uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;*/
+ uint32_t frame_idx = 75;
+ LOGH("FRAME INDEX %d", frame_idx);
+ // Release jpeg job data
+ m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
+
+ if (m_inputPPQ.getCurrentSize() > 0) {
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ }
+ LOGH("[KPI Perf] : jpeg job %d", evt->jobId);
+
+ if ((false == m_parent->m_bIntJpegEvtPending) &&
+ (m_parent->mDataCb == NULL ||
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
+ LOGW("No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled");
+ rc = NO_ERROR;
+ goto end;
+ }
+
+ if(evt->status == JPEG_JOB_STATUS_ERROR) {
+ LOGE("Error event handled from jpeg, status = %d",
+ evt->status);
+ rc = FAILED_TRANSACTION;
+ goto end;
+ }
+ if (!mJpegMemOpt) {
+ jpegData = evt->out_data.buf_vaddr;
+ }
+ else {
+ jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+ if (jpeg_out != NULL) {
+ jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+ if (jpeg_mem != NULL) {
+ jpegData = jpeg_mem->data;
+ }
+ }
+ }
+ m_parent->dumpJpegToFile(jpegData,
+ evt->out_data.buf_filled_len,
+ evt->jobId);
+ LOGH("Dump jpeg_size=%d", evt->out_data.buf_filled_len);
+ if(true == m_parent->m_bIntJpegEvtPending) {
+ //Sending JPEG snapshot taken notification to HAL
+ pthread_mutex_lock(&m_parent->m_int_lock);
+ pthread_cond_signal(&m_parent->m_int_cond);
+ pthread_mutex_unlock(&m_parent->m_int_lock);
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ return rc;
+ }
+ if (!mJpegMemOpt) {
+ // alloc jpeg memory to pass to upper layer
+ jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
+ 1, m_parent->mCallbackCookie);
+ if (NULL == jpeg_mem) {
+ rc = NO_MEMORY;
+ LOGE("getMemory for jpeg, ret = NO_MEMORY");
+ goto end;
+ }
+ memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
+ }
+ LOGH("Calling upperlayer callback to store JPEG image");
+ qcamera_release_data_t release_data;
+ memset(&release_data, 0, sizeof(qcamera_release_data_t));
+ release_data.data = jpeg_mem;
+ LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
+ rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+ jpeg_mem,
+ 0,
+ NULL,
+ &release_data,
+ frame_idx);
+ m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
+
+end:
+ if (rc != NO_ERROR) {
+ // send error msg to upper layer
+ LOGE("Jpeg Encoding failed. Notify Application");
+ sendEvtNotify(CAMERA_MSG_ERROR,
+ UNKNOWN_ERROR,
+ 0);
+
+ if (NULL != jpeg_mem) {
+ jpeg_mem->release(jpeg_mem);
+ jpeg_mem = NULL;
+ }
+ }
+
+ /* check whether to send callback for depth map */
+ if (m_parent->mParameters.isUbiRefocus() &&
+ (m_parent->getOutputImageCount() + 1 ==
+ m_parent->mParameters.getRefocusOutputCount())) {
+ m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
+
+ jpeg_mem = m_DataMem;
+ release_data.data = jpeg_mem;
+ m_DataMem = NULL;
+ LOGH("[KPI Perf]: send jpeg callback for depthmap ");
+ rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+ jpeg_mem,
+ 0,
+ NULL,
+ &release_data,
+ frame_idx);
+ if (rc != NO_ERROR) {
+ // send error msg to upper layer
+ sendEvtNotify(CAMERA_MSG_ERROR,
+ UNKNOWN_ERROR,
+ 0);
+ if (NULL != jpeg_mem) {
+ jpeg_mem->release(jpeg_mem);
+ jpeg_mem = NULL;
+ }
+ }
+ m_DataMem = NULL;
+ }
+ }
+
+ // wake up data proc thread to do next job,
+ // if previous request is blocked due to ongoing jpeg job
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ * @frame : received frame from reprocess channel.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : The frame after reprocess needs to be sent to jpeg encoding.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+ bool triggerEvent = TRUE;
+
+ LOGD("QCameraPostProcessor::processPPData");
+ bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
+ if (m_bInited == FALSE) {
+ LOGE("postproc not initialized yet");
+ return UNKNOWN_ERROR;
+ }
+
+ qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
+ if (NULL == job) {
+ LOGE("Cannot find reprocess job");
+ return BAD_VALUE;
+ }
+
+ if (!needSuperBufMatch && (job->src_frame == NULL
+ || job->src_reproc_frame == NULL) ) {
+ LOGE("Invalid reprocess job");
+ return BAD_VALUE;
+ }
+
+ if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
+ m_parent->mParameters.isNV21PictureFormat())) {
+ releaseOngoingPPData(job, this);
+ free(job);
+
+ if(m_parent->mParameters.isYUVFrameInfoNeeded())
+ setYUVFrameInfo(frame);
+ return processRawData(frame);
+ }
+#ifdef TARGET_TS_MAKEUP
+ // find snapshot frame
+ mm_camera_buf_def_t *pReprocFrame = NULL;
+ QCameraStream * pSnapshotStream = NULL;
+ QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+ if (pChannel == NULL) {
+ LOGE("No corresponding channel (ch_id = %d) exist, return here",
+ frame->ch_id);
+ return BAD_VALUE;
+ }
+
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ pSnapshotStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pSnapshotStream != NULL) {
+ if (pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ pReprocFrame = frame->bufs[i];
+ break;
+ }
+ }
+ }
+ if (pReprocFrame != NULL && m_parent->mParameters.isFaceDetectionEnabled()) {
+ m_parent->TsMakeupProcess_Snapshot(pReprocFrame,pSnapshotStream);
+ } else {
+ LOGH("pReprocFrame == NULL || isFaceDetectionEnabled = %d",
+ m_parent->mParameters.isFaceDetectionEnabled());
+ }
+#endif
+ if ((m_parent->isLongshotEnabled())
+ && (!m_parent->isCaptureShutterEnabled())
+ && (!m_parent->mCACDoneReceived)) {
+ // play shutter sound for longshot
+ // after reprocess is done
+ m_parent->playShutter();
+ }
+ m_parent->mCACDoneReceived = FALSE;
+
+ int8_t mCurReprocCount = job->reprocCount;
+ int8_t mCurChannelIndex = job->ppChannelIndex;
+ if ( mCurReprocCount > 1 ) {
+ //In case of pp 2nd pass, we can release input of 2nd pass
+ releaseSuperBuf(job->src_frame);
+ free(job->src_frame);
+ job->src_frame = NULL;
+ }
+
+ LOGD("mCurReprocCount = %d mCurChannelIndex = %d mTotalNumReproc = %d",
+ mCurReprocCount, mCurChannelIndex,
+ m_parent->mParameters.getReprocCount());
+ if (mCurReprocCount < m_parent->mParameters.getReprocCount()) {
+ //More pp passes needed. Push frame back to pp queue.
+ qcamera_pp_data_t *pp_request_job = job;
+ pp_request_job->src_frame = frame;
+
+ if ((mPPChannels[mCurChannelIndex]->getReprocCount()
+ == mCurReprocCount) &&
+ (mPPChannels[mCurChannelIndex + 1] != NULL)) {
+ pp_request_job->ppChannelIndex++;
+ }
+
+ // enqueue to post proc input queue
+ if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
+ LOGW("m_input PP Q is not active!!!");
+ releaseOngoingPPData(pp_request_job,this);
+ free(pp_request_job);
+ pp_request_job = NULL;
+ triggerEvent = FALSE;
+ }
+ } else {
+ //Done with post processing. Send frame to Jpeg
+ qcamera_jpeg_data_t *jpeg_job =
+ (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+ if (jpeg_job == NULL) {
+ LOGE("No memory for jpeg job");
+ return NO_MEMORY;
+ }
+
+ memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+ jpeg_job->src_frame = frame;
+ jpeg_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
+ jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
+ jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
+ jpeg_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
+ jpeg_job->offline_buffer = job ? job->offline_buffer : false;
+
+ // find meta data frame
+ mm_camera_buf_def_t *meta_frame = NULL;
+ if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+ size_t meta_idx = m_parent->mParameters.getExifBufIndex(m_PPindex);
+ if (m_InputMetadata.size() >= (meta_idx + 1)) {
+ meta_frame = m_InputMetadata.itemAt(meta_idx);
+ } else {
+ LOGW("Input metadata vector contains %d entries, index required %d",
+ m_InputMetadata.size(), meta_idx);
+ }
+ m_PPindex++;
+ } else {
+ for (uint32_t i = 0; job && job->src_reproc_frame &&
+ (i < job->src_reproc_frame->num_bufs); i++) {
+ // look through input superbuf
+ if (job->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+ meta_frame = job->src_reproc_frame->bufs[i];
+ break;
+ }
+ }
+
+ if (meta_frame == NULL) {
+ // look through reprocess superbuf
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+ meta_frame = frame->bufs[i];
+ break;
+ }
+ }
+ }
+ }
+ if (meta_frame != NULL) {
+ // fill in meta data frame ptr
+ jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
+ }
+
+ // enqueue reprocessed frame to jpeg input queue
+ if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
+ LOGW("Input Jpeg Q is not active!!!");
+ releaseJpegJobData(jpeg_job);
+ free(jpeg_job);
+ jpeg_job = NULL;
+ triggerEvent = FALSE;
+ }
+
+ // free pp job buf
+ pthread_mutex_lock(&m_reprocess_lock);
+ if (job) {
+ free(job);
+ }
+ pthread_mutex_unlock(&m_reprocess_lock);
+ }
+
+ LOGD("");
+ // wake up data proc thread
+
+ if (triggerEvent) {
+ m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ }
+
+ return NO_ERROR;
+}
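The multi-pass decision at the end of processPPData() is essentially a counter check: while the frame's reprocess count is below the configured total, it goes back to the PP input queue (possibly moving on to the next PP channel); once the count is reached it is handed to JPEG. A small standalone sketch of that control flow, with illustrative values only:

#include <cstdio>

int main() {
    const int totalReprocCount = 3;          // e.g. what getReprocCount() would return
    const int perChannelReproc[] = { 2, 1 }; // passes handled by each PP channel (assumed)
    int reprocCount = 0, channelIndex = 0;

    while (reprocCount < totalReprocCount) {
        reprocCount++;                       // one reprocess pass completed
        std::printf("pass %d on channel %d\n", reprocCount, channelIndex);
        if (reprocCount < totalReprocCount &&
                perChannelReproc[channelIndex] == reprocCount) {
            channelIndex++;                  // this channel is done, move to the next
        }
    }
    std::printf("all passes done, send frame to JPEG\n");
    return 0;
}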
+
+/*===========================================================================
+ * FUNCTION : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ * @jobId : job Id of the job
+ *
+ * RETURN : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE : Currently only one job is sent to mm-jpeg-interface for jpeg
+ * encoding. Therefore simply dequeuing from the ongoing Jpeg Queue
+ * is enough to find the jpeg job.
+ *==========================================================================*/
+qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+ qcamera_jpeg_data_t * job = NULL;
+ if (jobId == 0) {
+ LOGE("not a valid jpeg jobId");
+ return NULL;
+ }
+
+ // currently only one jpeg job ongoing, so simply dequeue the head
+ job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+ return job;
+}
+
+/*===========================================================================
+ * FUNCTION : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ * @data : ptr to post process input data
+ * @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
+{
+ QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+ if (NULL != pme) {
+ qcamera_pp_request_t *pp_job = (qcamera_pp_request_t *)data;
+ if (NULL != pp_job->src_frame) {
+ pme->releaseSuperBuf(pp_job->src_frame);
+ if (pp_job->src_frame == pp_job->src_reproc_frame)
+ pp_job->src_reproc_frame = NULL;
+ free(pp_job->src_frame);
+ pp_job->src_frame = NULL;
+ }
+ if (NULL != pp_job->src_reproc_frame) {
+ pme->releaseSuperBuf(pp_job->src_reproc_frame);
+ free(pp_job->src_reproc_frame);
+ pp_job->src_reproc_frame = NULL;
+ }
+ pp_job->reprocCount = 0;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ * @data : ptr to ongoing jpeg job data
+ * @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
+{
+ QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+ if (NULL != pme) {
+ pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
+ LOGH("Rleased job ID %u",
+ ((qcamera_jpeg_data_t *)data)->jobId);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ * @data : ptr to ongoing postprocess job
+ * @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+ QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+ if (NULL != pme) {
+ qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
+ if (NULL != pp_job->src_frame) {
+ if (!pp_job->reproc_frame_release) {
+ pme->releaseSuperBuf(pp_job->src_frame);
+ }
+ if (pp_job->src_frame == pp_job->src_reproc_frame)
+ pp_job->src_reproc_frame = NULL;
+
+ free(pp_job->src_frame);
+ pp_job->src_frame = NULL;
+ }
+ if (NULL != pp_job->src_reproc_frame) {
+ pme->releaseSuperBuf(pp_job->src_reproc_frame);
+ free(pp_job->src_reproc_frame);
+ pp_job->src_reproc_frame = NULL;
+ }
+ if ((pp_job->offline_reproc_buf != NULL)
+ && (pp_job->offline_buffer)) {
+ free(pp_job->offline_reproc_buf);
+ pp_job->offline_buffer = false;
+ }
+ pp_job->reprocCount = 0;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseNotifyData
+ *
+ * DESCRIPTION: function to release internal resources in notify data struct
+ *
+ * PARAMETERS :
+ * @user_data : ptr to user data
+ * @cookie : callback cookie
+ * @cb_status : callback status
+ *
+ * RETURN : None
+ *
+ * NOTE : deallocate jpeg heap memory if it's not NULL
+ *==========================================================================*/
+void QCameraPostProcessor::releaseNotifyData(void *user_data,
+ void *cookie,
+ int32_t cb_status)
+{
+ LOGD("releaseNotifyData release_data %p", user_data);
+
+ qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
+ QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
+ if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
+
+ if ( postProc->mUseSaveProc &&
+ app_cb->release_data.unlinkFile &&
+ ( NO_ERROR != cb_status ) ) {
+
+ String8 unlinkPath((const char *) app_cb->release_data.data->data,
+ app_cb->release_data.data->size);
+ int rc = unlink(unlinkPath.string());
+ LOGH("Unlinking stored file rc = %d",
+ rc);
+ }
+
+ if (app_cb && NULL != app_cb->release_data.data) {
+ app_cb->release_data.data->release(app_cb->release_data.data);
+ app_cb->release_data.data = NULL;
+ }
+ if (app_cb && NULL != app_cb->release_data.frame) {
+ postProc->releaseSuperBuf(app_cb->release_data.frame);
+ free(app_cb->release_data.frame);
+ app_cb->release_data.frame = NULL;
+ }
+ if (app_cb && NULL != app_cb->release_data.streamBufs) {
+ app_cb->release_data.streamBufs->deallocate();
+ delete app_cb->release_data.streamBufs;
+ app_cb->release_data.streamBufs = NULL;
+ }
+ free(app_cb);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning it to the kernel
+ *
+ * PARAMETERS :
+ * @super_buf : ptr to the superbuf frame
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+ QCameraChannel *pChannel = NULL;
+
+ if (NULL != super_buf) {
+ pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+
+ if ( NULL == pChannel ) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+
+ if (pChannel != NULL) {
+ pChannel->bufDone(super_buf);
+ } else {
+ LOGE("Channel id %d not found!!",
+ super_buf->ch_id);
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseSuperBuf
+ *
+ * DESCRIPTION : function to release a superbuf frame by returning it to the kernel
+ *
+ * PARAMETERS :
+ * @super_buf : ptr to the superbuf frame
+ * @stream_type: Type of stream to be released
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf,
+ cam_stream_type_t stream_type)
+{
+ QCameraChannel *pChannel = NULL;
+
+ if (NULL != super_buf) {
+ pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+
+ if (pChannel != NULL) {
+ for (uint32_t i = 0; i < super_buf->num_bufs; i++) {
+ if (super_buf->bufs[i] != NULL) {
+ QCameraStream *pStream =
+ pChannel->getStreamByHandle(super_buf->bufs[i]->stream_id);
+ if ((pStream != NULL) && ((pStream->getMyType() == stream_type)
+ || (pStream->getMyOriginalType() == stream_type))) {
+ pChannel->bufDone(super_buf, super_buf->bufs[i]->stream_id);
+ break;
+ }
+ }
+ }
+ } else {
+ LOGE("Channel id %d not found!!",
+ super_buf->ch_id);
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ * @job : ptr to jpeg job struct
+ *
+ * RETURN : None
+ *
+ * NOTE : original source frame needs to be queued back to kernel for
+ * future use. Output buf of jpeg job needs to be released since
+ * it's allocated for each job. Exif object needs to be deleted.
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
+{
+ LOGD("E");
+ if (NULL != job) {
+ if (NULL != job->src_reproc_frame) {
+ if (!job->reproc_frame_release) {
+ releaseSuperBuf(job->src_reproc_frame);
+ }
+ free(job->src_reproc_frame);
+ job->src_reproc_frame = NULL;
+ }
+
+ if (NULL != job->src_frame) {
+ releaseSuperBuf(job->src_frame);
+ free(job->src_frame);
+ job->src_frame = NULL;
+ }
+
+ if (NULL != job->pJpegExifObj) {
+ delete job->pJpegExifObj;
+ job->pJpegExifObj = NULL;
+ }
+
+ if (NULL != job->src_reproc_bufs) {
+ delete [] job->src_reproc_bufs;
+ }
+
+ if ((job->offline_reproc_buf != NULL)
+ && (job->offline_buffer)) {
+ free(job->offline_reproc_buf);
+ job->offline_buffer = false;
+ }
+ }
+ LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION : releaseSaveJobData
+ *
+ * DESCRIPTION: function to release internal resources in store jobs
+ *
+ * PARAMETERS :
+ * @data : ptr to save job data (qcamera_jpeg_evt_payload_t)
+ * @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
+{
+ LOGD("E");
+
+ QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
+ if (NULL == pme) {
+ LOGE("Invalid postproc handle");
+ return;
+ }
+
+ qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) data;
+ if (job_data == NULL) {
+ LOGE("Invalid jpeg event data");
+ return;
+ }
+
+ // find job by jobId
+ qcamera_jpeg_data_t *job = pme->findJpegJobByJobId(job_data->jobId);
+
+ if (NULL != job) {
+ pme->releaseJpegJobData(job);
+ free(job);
+ } else {
+ LOGE("Invalid jpeg job");
+ }
+
+ LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION : releaseRawData
+ *
+ * DESCRIPTION: function to release raw frame data
+ *
+ * PARAMETERS :
+ * @data : ptr to raw data super buffer
+ * @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
+{
+ LOGD("E");
+
+ QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
+ if (NULL == pme) {
+ LOGE("Invalid postproc handle");
+ return;
+ }
+ mm_camera_super_buf_t *super_buf = (mm_camera_super_buf_t *) data;
+ pme->releaseSuperBuf(super_buf);
+
+ LOGD("X");
+}
+
+
+/*===========================================================================
+ * FUNCTION : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg color format based on its image format
+ *
+ * PARAMETERS :
+ * @img_fmt : image format
+ *
+ * RETURN : jpeg color format that can be understood by the omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+ switch (img_fmt) {
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+ case CAM_FORMAT_YUV_420_YV12:
+ return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+ case CAM_FORMAT_YUV_422_NV61:
+ return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+ case CAM_FORMAT_YUV_422_NV16:
+ return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+ default:
+ return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+ }
+}
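The mapping above is keyed on chroma order (NV21 vs NV12) and subsampling (4:2:0 vs 4:2:2). A standalone sketch that encodes the same rule and spot-checks two entries; the enum names here are local stand-ins, not the camera interface definitions:

#include <cassert>
#include <cstdio>

enum ImgFmt  { IMG_NV21_420, IMG_NV12_420, IMG_NV61_422, IMG_NV16_422 };
enum JpegFmt { JPEG_YCRCB_H2V2, JPEG_YCBCR_H2V2, JPEG_YCRCB_H2V1, JPEG_YCBCR_H2V1 };

static JpegFmt to_jpeg_color(ImgFmt f) {
    switch (f) {
    case IMG_NV21_420: return JPEG_YCRCB_H2V2;   // CrCb order, 4:2:0
    case IMG_NV12_420: return JPEG_YCBCR_H2V2;   // CbCr order, 4:2:0
    case IMG_NV61_422: return JPEG_YCRCB_H2V1;   // CrCb order, 4:2:2
    case IMG_NV16_422: return JPEG_YCBCR_H2V1;   // CbCr order, 4:2:2
    }
    return JPEG_YCRCB_H2V2;                      // default, as in the HAL mapping
}

int main() {
    assert(to_jpeg_color(IMG_NV21_420) == JPEG_YCRCB_H2V2);
    assert(to_jpeg_color(IMG_NV16_422) == JPEG_YCBCR_H2V1);
    std::printf("color format mapping ok\n");
    return 0;
}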
+
+/*===========================================================================
+ * FUNCTION : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ * @img_fmt : image format
+ *
+ * RETURN : return jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+ switch (img_fmt) {
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ case CAM_FORMAT_YUV_420_YV12:
+ case CAM_FORMAT_YUV_422_NV61:
+ case CAM_FORMAT_YUV_422_NV16:
+ return MM_JPEG_FMT_YUV;
+ default:
+ return MM_JPEG_FMT_YUV;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : queryStreams
+ *
+ * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
+ * streams and frame from bundled super buffer
+ *
+ * PARAMETERS :
+ * @main : ptr to main stream if present
+ * @thumb : ptr to thumbnail stream if present
+ * @reproc : ptr to reprocess stream if present
+ * @main_image : ptr to main image if present
+ * @thumb_image: ptr to thumbnail image if present
+ * @frame : bundled super buffer
+ * @reproc_frame : bundled source frame buffer
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
+ QCameraStream **thumb,
+ QCameraStream **reproc,
+ mm_camera_buf_def_t **main_image,
+ mm_camera_buf_def_t **thumb_image,
+ mm_camera_super_buf_t *frame,
+ mm_camera_super_buf_t *reproc_frame)
+{
+ if (NULL == frame) {
+ return NO_INIT;
+ }
+
+ QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
+ // check reprocess channel if not found
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+ if (pChannel == NULL) {
+ LOGD("No corresponding channel (ch_id = %d) exist, return here",
+ frame->ch_id);
+ return BAD_VALUE;
+ }
+
+ // Use snapshot stream to create thumbnail if snapshot and preview
+ // flip settings don't match in ZSL mode.
+ bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
+ (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
+ m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
+ !m_parent->mParameters.generateThumbFromMain());
+
+ *main = *thumb = *reproc = NULL;
+ *main_image = *thumb_image = NULL;
+ // find snapshot frame and thumbnail frame
+ for (uint32_t i = 0; i < frame->num_bufs; i++) {
+ QCameraStream *pStream =
+ pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
+ *main = pStream;
+ *main_image = frame->bufs[i];
+ } else if (thumb_stream_needed &&
+ (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+ *thumb = pStream;
+ *thumb_image = frame->bufs[i];
+ }
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC) ) {
+ *reproc = pStream;
+ }
+ }
+ }
+
+ if (thumb_stream_needed && *thumb_image == NULL && reproc_frame != NULL) {
+ QCameraChannel *pSrcReprocChannel = NULL;
+ pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
+ if (pSrcReprocChannel != NULL) {
+ // find thumbnail frame
+ for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
+ QCameraStream *pStream =
+ pSrcReprocChannel->getStreamByHandle(
+ reproc_frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+ pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+ *thumb = pStream;
+ *thumb_image = reproc_frame->bufs[i];
+ }
+ }
+ }
+ }
+ }
+
+ return NO_ERROR;
+}
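+
+/* Caller-side sketch (hypothetical, mirroring how encodeData() below consumes
+ * the output pointers): the thumbnail pointers may legitimately stay NULL,
+ * only the main image buffer is mandatory:
+ *
+ *   QCameraStream *main = NULL, *thumb = NULL, *reproc = NULL;
+ *   mm_camera_buf_def_t *main_img = NULL, *thumb_img = NULL;
+ *   if ((queryStreams(&main, &thumb, &reproc, &main_img, &thumb_img,
+ *           frame, reproc_frame) != NO_ERROR) || (main_img == NULL)) {
+ *       return BAD_VALUE; // no usable snapshot buffer in the bundle
+ *   }
+ */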
+
+/*===========================================================================
+ * FUNCTION : syncStreamParams
+ *
+ * DESCRIPTION: Query the runtime parameters of all streams included
+ * in the main and reprocessed frames
+ *
+ * PARAMETERS :
+ * @frame : Main image super buffer
+ * @reproc_frame : Image super buffer that got reprocessed
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame,
+ mm_camera_super_buf_t *reproc_frame)
+{
+ QCameraStream *reproc_stream = NULL;
+ QCameraStream *main_stream = NULL;
+ QCameraStream *thumb_stream = NULL;
+ mm_camera_buf_def_t *main_frame = NULL;
+ mm_camera_buf_def_t *thumb_frame = NULL;
+ int32_t ret = NO_ERROR;
+
+ ret = queryStreams(&main_stream,
+ &thumb_stream,
+ &reproc_stream,
+ &main_frame,
+ &thumb_frame,
+ frame,
+ reproc_frame);
+ if (NO_ERROR != ret) {
+ LOGE("Camera streams query from input frames failed %d",
+ ret);
+ return ret;
+ }
+
+ if (NULL != main_stream) {
+ ret = main_stream->syncRuntimeParams();
+ if (NO_ERROR != ret) {
+ LOGE("Syncing of main stream runtime parameters failed %d",
+ ret);
+ return ret;
+ }
+ }
+
+ if (NULL != thumb_stream) {
+ ret = thumb_stream->syncRuntimeParams();
+ if (NO_ERROR != ret) {
+ LOGE("Syncing of thumb stream runtime parameters failed %d",
+ ret);
+ return ret;
+ }
+ }
+
+ if ((NULL != reproc_stream) && (reproc_stream != main_stream)) {
+ ret = reproc_stream->syncRuntimeParams();
+ if (NO_ERROR != ret) {
+ LOGE("Syncing of reproc stream runtime parameters failed %d",
+ ret);
+ return ret;
+ }
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ * mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ * @jpeg_job_data : ptr to a struct saving job related information
+ * @needNewSess : flag to indicate if a new jpeg encoding session needs
+ * to be created. After creation, this flag will be toggled
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+ uint8_t &needNewSess)
+{
+ LOGD("E");
+ int32_t ret = NO_ERROR;
+ mm_jpeg_job_t jpg_job;
+ uint32_t jobId = 0;
+ QCameraStream *reproc_stream = NULL;
+ QCameraStream *main_stream = NULL;
+ mm_camera_buf_def_t *main_frame = NULL;
+ QCameraStream *thumb_stream = NULL;
+ mm_camera_buf_def_t *thumb_frame = NULL;
+ mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
+ cam_rect_t crop;
+ cam_stream_parm_buffer_t param;
+ cam_stream_img_prop_t imgProp;
+
+ // find channel
+ QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+ // check reprocess channel if not found
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+
+ if (pChannel == NULL) {
+ LOGE("No corresponding channel (ch_id = %d) exist, return here",
+ recvd_frame->ch_id);
+ return BAD_VALUE;
+ }
+
+ const uint32_t jpeg_rotation = m_parent->mParameters.getJpegRotation();
+
+ ret = queryStreams(&main_stream,
+ &thumb_stream,
+ &reproc_stream,
+ &main_frame,
+ &thumb_frame,
+ recvd_frame,
+ jpeg_job_data->src_reproc_frame);
+ if (NO_ERROR != ret) {
+ return ret;
+ }
+
+ if (NULL == main_frame) {
+ LOGE("Main frame is NULL");
+ return BAD_VALUE;
+ }
+
+ if (NULL == thumb_frame) {
+ LOGD("Thumbnail frame does not exist");
+ }
+
+ QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
+ if (NULL == memObj) {
+ LOGE("Memeory Obj of main frame is NULL");
+ return NO_MEMORY;
+ }
+
+ // dump snapshot frame if enabled
+ m_parent->dumpFrameToFile(main_stream, main_frame,
+ QCAMERA_DUMP_FRM_SNAPSHOT, (char *)"CPP");
+
+ // send upperlayer callback for raw image
+ camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
+ if (NULL != m_parent->mDataCb &&
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+ cbArg.data = mem;
+ cbArg.index = 0;
+ m_parent->m_cbNotifier.notifyCallback(cbArg);
+ }
+ if (NULL != m_parent->mNotifyCb &&
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+ cbArg.ext1 = 0;
+ cbArg.ext2 = 0;
+ m_parent->m_cbNotifier.notifyCallback(cbArg);
+ }
+
+ if (mJpegClientHandle <= 0) {
+ LOGE("Error: bug here, mJpegClientHandle is 0");
+ return UNKNOWN_ERROR;
+ }
+
+ if (needNewSess) {
+ // create jpeg encoding session
+ mm_jpeg_encode_params_t encodeParam;
+ memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+ ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
+ if (ret != NO_ERROR) {
+ LOGE("error getting encoding config");
+ return ret;
+ }
+ LOGH("[KPI Perf] : call jpeg create_session");
+ ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+ if (ret != NO_ERROR) {
+ LOGE("error creating a new jpeg encoding session");
+ return ret;
+ }
+ needNewSess = FALSE;
+ }
+ // Fill in new job
+ memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+ jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+ jpg_job.encode_job.session_id = mJpegSessionId;
+ jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
+ jpg_job.encode_job.dst_index = 0;
+
+ if (mJpegMemOpt) {
+ jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
+ } else if (mUseJpegBurst) {
+ jpg_job.encode_job.dst_index = -1;
+ }
+
+ // use the reprocess source frame as the work buffer; if the source buffer
+ // is not available, the jpeg interface will allocate its own work buffer
+ if (jpeg_job_data->src_reproc_frame != NULL) {
+ int32_t ret = NO_ERROR;
+ QCameraStream *main_stream = NULL;
+ mm_camera_buf_def_t *main_frame = NULL;
+ QCameraStream *thumb_stream = NULL;
+ mm_camera_buf_def_t *thumb_frame = NULL;
+ QCameraStream *reproc_stream = NULL;
+ mm_camera_buf_def_t *workBuf = NULL;
+ // Call queryStreams to fetch source of reproc frame
+ ret = queryStreams(&main_stream,
+ &thumb_stream,
+ &reproc_stream,
+ &main_frame,
+ &thumb_frame,
+ jpeg_job_data->src_reproc_frame,
+ NULL);
+
+ if ((NO_ERROR == ret) && ((workBuf = main_frame) != NULL)
+ && !m_parent->isLowPowerMode()) {
+ camera_memory_t *camWorkMem = NULL;
+ int workBufIndex = workBuf->buf_idx;
+ QCameraMemory *workMem = (QCameraMemory *)workBuf->mem_info;
+ if (workMem != NULL) {
+ camWorkMem = workMem->getMemory(workBufIndex, false);
+ }
+ if (camWorkMem != NULL && workMem != NULL) {
+ jpg_job.encode_job.work_buf.buf_size = camWorkMem->size;
+ jpg_job.encode_job.work_buf.buf_vaddr = (uint8_t *)camWorkMem->data;
+ jpg_job.encode_job.work_buf.fd = workMem->getFd(workBufIndex);
+ workMem->invalidateCache(workBufIndex);
+ }
+ }
+ }
+
+ cam_dimension_t src_dim;
+ memset(&src_dim, 0, sizeof(cam_dimension_t));
+ main_stream->getFrameDimension(src_dim);
+
+ bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+ bool img_feature_enabled =
+ m_parent->mParameters.isUbiFocusEnabled() ||
+ m_parent->mParameters.isUbiRefocus() ||
+ m_parent->mParameters.isChromaFlashEnabled() ||
+ m_parent->mParameters.isOptiZoomEnabled() ||
+ m_parent->mParameters.isStillMoreEnabled();
+
+ LOGH("Crop needed %d", img_feature_enabled);
+ crop.left = 0;
+ crop.top = 0;
+ crop.height = src_dim.height;
+ crop.width = src_dim.width;
+
+ param = main_stream->getOutputCrop();
+ for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+ if (param.outputCrop.crop_info[i].stream_id
+ == main_stream->getMyServerID()) {
+ crop = param.outputCrop.crop_info[i].crop;
+ main_stream->setCropInfo(crop);
+ }
+ }
+ if (img_feature_enabled) {
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+ param = main_stream->getImgProp();
+ imgProp = param.imgProp;
+ main_stream->setCropInfo(imgProp.crop);
+ crop = imgProp.crop;
+ thumb_stream = NULL; /* use thumbnail from main image */
+
+ if ((reproc_stream != NULL) && (m_DataMem == NULL) &&
+ m_parent->mParameters.isUbiRefocus()) {
+
+ QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
+ cam_misc_buf_t* refocusResult =
+ reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
+ uint32_t resultSize = refocusResult->header_size +
+ refocusResult->width * refocusResult->height;
+ camera_memory_t *dataMem = m_parent->mGetMemory(-1, resultSize,
+ 1, m_parent->mCallbackCookie);
+
+ LOGH("Refocus result header %u dims %dx%d",
+ resultSize, refocusResult->width, refocusResult->height);
+
+ if (dataMem && dataMem->data) {
+ memcpy(dataMem->data, refocusResult->data, resultSize);
+ //save mem pointer for depth map
+ m_DataMem = dataMem;
+ }
+ }
+ } else if ((reproc_stream != NULL) && (m_parent->mParameters.isTruePortraitEnabled())) {
+
+ QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
+ cam_misc_buf_t* tpResult =
+ reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
+ uint32_t tpMetaSize = tpResult->header_size + tpResult->width * tpResult->height;
+
+ LOGH("True portrait result header %d% dims dx%d",
+ tpMetaSize, tpResult->width, tpResult->height);
+
+ CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"tp", "bm", -1, "y",
+ &tpResult->data, tpMetaSize);
+ }
+
+ cam_dimension_t dst_dim;
+
+ if (hdr_output_crop && crop.height) {
+ dst_dim.height = crop.height;
+ } else {
+ dst_dim.height = src_dim.height;
+ }
+ if (hdr_output_crop && crop.width) {
+ dst_dim.width = crop.width;
+ } else {
+ dst_dim.width = src_dim.width;
+ }
+
+ // main dim
+ jpg_job.encode_job.main_dim.src_dim = src_dim;
+ jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+ jpg_job.encode_job.main_dim.crop = crop;
+
+ // get 3a sw version info
+ cam_q3a_version_t sw_version =
+ m_parent->getCamHalCapabilities()->q3a_version;
+
+ // get exif data
+ QCameraExif *pJpegExifObj = m_parent->getExifData();
+ jpeg_job_data->pJpegExifObj = pJpegExifObj;
+ if (pJpegExifObj != NULL) {
+ jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
+ jpg_job.encode_job.exif_info.numOfEntries =
+ pJpegExifObj->getNumOfEntries();
+ jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
+ sw_version.major_version;
+ jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
+ sw_version.minor_version;
+ jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
+ sw_version.patch_version;
+ jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
+ sw_version.new_feature_des;
+ }
+
+ // set rotation only when no online rotation or offline pp rotation is done before
+ if (!m_parent->needRotationReprocess()) {
+ jpg_job.encode_job.rotation = jpeg_rotation;
+ }
+ LOGH("jpeg rotation is set to %d", jpg_job.encode_job.rotation);
+
+ // thumbnail dim
+ if (m_bThumbnailNeeded == TRUE) {
+ m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
+
+ if (thumb_stream == NULL) {
+ // need jpeg thumbnail, but no postview/preview stream exists
+ // we use the main stream/frame to encode thumbnail
+ thumb_stream = main_stream;
+ thumb_frame = main_frame;
+ }
+ if (m_parent->needRotationReprocess() &&
+ ((90 == jpeg_rotation) || (270 == jpeg_rotation))) {
+ // swap thumbnail dimensions
+ cam_dimension_t tmp_dim = jpg_job.encode_job.thumb_dim.dst_dim;
+ jpg_job.encode_job.thumb_dim.dst_dim.width = tmp_dim.height;
+ jpg_job.encode_job.thumb_dim.dst_dim.height = tmp_dim.width;
+ }
+
+ memset(&src_dim, 0, sizeof(cam_dimension_t));
+ thumb_stream->getFrameDimension(src_dim);
+ jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+
+ // crop is the same if frame is the same
+ if (thumb_frame != main_frame) {
+ crop.left = 0;
+ crop.top = 0;
+ crop.height = src_dim.height;
+ crop.width = src_dim.width;
+
+ param = thumb_stream->getOutputCrop();
+ for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+ if (param.outputCrop.crop_info[i].stream_id
+ == thumb_stream->getMyServerID()) {
+ crop = param.outputCrop.crop_info[i].crop;
+ thumb_stream->setCropInfo(crop);
+ }
+ }
+ }
+
+
+ jpg_job.encode_job.thumb_dim.crop = crop;
+ if (thumb_frame != NULL) {
+ jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
+ }
+ LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+ jpg_job.encode_job.thumb_index,
+ jpg_job.encode_job.thumb_dim.src_dim.width,
+ jpg_job.encode_job.thumb_dim.src_dim.height,
+ jpg_job.encode_job.thumb_dim.dst_dim.width,
+ jpg_job.encode_job.thumb_dim.dst_dim.height);
+ }
+
+ LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+ jpg_job.encode_job.src_index,
+ jpg_job.encode_job.main_dim.src_dim.width,
+ jpg_job.encode_job.main_dim.src_dim.height,
+ jpg_job.encode_job.main_dim.dst_dim.width,
+ jpg_job.encode_job.main_dim.dst_dim.height);
+
+ if (thumb_frame != NULL) {
+ // dump thumbnail frame if enabled
+ m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+ }
+
+ if (jpeg_job_data->metadata != NULL) {
+ // fill in meta data frame ptr
+ jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
+ }
+
+ jpg_job.encode_job.hal_version = CAM_HAL_V1;
+ m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
+ jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
+ jpg_job.encode_job.cam_exif_params.debug_params =
+ (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
+ if (!jpg_job.encode_job.cam_exif_params.debug_params) {
+ LOGE("Out of Memory. Allocation failed for 3A debug exif params");
+ return NO_MEMORY;
+ }
+
+ jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
+
+
+ if (NULL != jpg_job.encode_job.p_metadata && (jpg_job.encode_job.mobicat_mask > 0)) {
+
+ if (m_parent->mExifParams.debug_params) {
+ memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
+ m_parent->mExifParams.debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
+
+ /* Save a copy of mobicat params */
+ jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
+ jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
+
+ if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
+ jpg_job.encode_job.p_metadata->mobicat_aec_params =
+ jpg_job.encode_job.cam_exif_params.cam_3a_params;
+ }
+
+ /* Save a copy of 3A debug params */
+ jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+ jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+ jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
+
+ if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_ae_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_awb_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_af_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_asd_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+ }
+ if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+ jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+ jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+ }
+ }
+
+ }
+
+ /* Init the QTable */
+ for (int i = 0; i < QTABLE_MAX; i++) {
+ jpg_job.encode_job.qtable_set[i] = 0;
+ }
+
+ const cam_sync_related_sensors_event_info_t* related_cam_info =
+ m_parent->getRelatedCamSyncInfo();
+ if (related_cam_info->sync_control == CAM_SYNC_RELATED_SENSORS_ON &&
+ m_parent->getMpoComposition()) {
+ jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_MPO;
+ if (related_cam_info->type == CAM_TYPE_MAIN ) {
+ jpg_job.encode_job.multi_image_info.is_primary = TRUE;
+ LOGD("Encoding MPO Primary JPEG");
+ } else {
+ jpg_job.encode_job.multi_image_info.is_primary = FALSE;
+ LOGD("Encoding MPO Aux JPEG");
+ }
+ jpg_job.encode_job.multi_image_info.num_of_images = 2;
+ } else {
+ LOGD("Encoding Single JPEG");
+ jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
+ jpg_job.encode_job.multi_image_info.is_primary = FALSE;
+ jpg_job.encode_job.multi_image_info.num_of_images = 1;
+ }
+
+ LOGI("[KPI Perf] : PROFILE_JPEG_JOB_START");
+ ret = mJpegHandle.start_job(&jpg_job, &jobId);
+ if (jpg_job.encode_job.cam_exif_params.debug_params) {
+ free(jpg_job.encode_job.cam_exif_params.debug_params);
+ }
+ if (ret == NO_ERROR) {
+ // remember job info
+ jpeg_job_data->jobId = jobId;
+ }
+
+ return ret;
+}
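+
+/* Session lifecycle sketch (assembled from the calls made in this file; the
+ * ordering here is illustrative, not an extra code path): one encode session
+ * is created lazily and reused for subsequent jobs until data processing stops:
+ *
+ *   mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+ *   mJpegHandle.start_job(&jpg_job, &jobId);      // per captured frame
+ *   ...                                           // registered jpeg callback fires on completion
+ *   mJpegHandle.abort_job(jobId);                 // only for jobs still pending at stop
+ *   mJpegHandle.destroy_session(mJpegSessionId);  // on CAMERA_CMD_TYPE_STOP_DATA_PROC
+ */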
+
+/*===========================================================================
+ * FUNCTION : processRawImageImpl
+ *
+ * DESCRIPTION: function to send raw image to upper layer
+ *
+ * PARAMETERS :
+ * @recvd_frame : frame to be encoded
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
+{
+ int32_t rc = NO_ERROR;
+
+ QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+ QCameraStream *pStream = NULL;
+ mm_camera_buf_def_t *frame = NULL;
+ // check reprocess channel if not found
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+ if (pChannel == NULL) {
+ LOGE("No corresponding channel (ch_id = %d) exist, return here",
+ recvd_frame->ch_id);
+ return BAD_VALUE;
+ }
+
+ // find snapshot frame
+ for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+ QCameraStream *pCurStream =
+ pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+ if (pCurStream != NULL) {
+ if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
+ pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
+ pStream = pCurStream;
+ frame = recvd_frame->bufs[i];
+ break;
+ }
+ }
+ }
+
+ if ( NULL == frame ) {
+ LOGE("No valid raw buffer");
+ return BAD_VALUE;
+ }
+
+ QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
+ bool zslChannelUsed = m_parent->isZSLMode() &&
+ ( pChannel != mPPChannels[0] );
+ camera_memory_t *raw_mem = NULL;
+
+ if (rawMemObj != NULL) {
+ if (zslChannelUsed) {
+ raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
+ } else {
+ raw_mem = m_parent->mGetMemory(-1,
+ frame->frame_len,
+ 1,
+ m_parent->mCallbackCookie);
+ if (NULL == raw_mem) {
+ LOGE("Not enough memory for RAW cb ");
+ return NO_MEMORY;
+ }
+ memcpy(raw_mem->data, frame->buffer, frame->frame_len);
+ }
+ }
+
+ if (NULL != rawMemObj && NULL != raw_mem) {
+ // dump frame into file
+ if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ // for YUV422 NV16 case
+ m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+ } else {
+ //Received RAW snapshot taken notification
+ m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
+
+ if(true == m_parent->m_bIntRawEvtPending) {
+ //Sending RAW snapshot taken notification to HAL
+ memset(&m_dst_dim, 0, sizeof(m_dst_dim));
+ pStream->getFrameDimension(m_dst_dim);
+ pthread_mutex_lock(&m_parent->m_int_lock);
+ pthread_cond_signal(&m_parent->m_int_cond);
+ pthread_mutex_unlock(&m_parent->m_int_lock);
+ raw_mem->release(raw_mem);
+ return rc;
+ }
+ }
+
+ // send data callback / notify for RAW_IMAGE
+ if (NULL != m_parent->mDataCb &&
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+ cbArg.data = raw_mem;
+ cbArg.index = 0;
+ m_parent->m_cbNotifier.notifyCallback(cbArg);
+ }
+ if (NULL != m_parent->mNotifyCb &&
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+ qcamera_callback_argm_t cbArg;
+ memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+ cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+ cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+ cbArg.ext1 = 0;
+ cbArg.ext2 = 0;
+ m_parent->m_cbNotifier.notifyCallback(cbArg);
+ }
+
+ if ((m_parent->mDataCb != NULL) &&
+ m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
+ qcamera_release_data_t release_data;
+ memset(&release_data, 0, sizeof(qcamera_release_data_t));
+ if ( zslChannelUsed ) {
+ release_data.frame = recvd_frame;
+ } else {
+ release_data.data = raw_mem;
+ }
+ rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+ raw_mem,
+ 0,
+ NULL,
+ &release_data);
+ } else {
+ raw_mem->release(raw_mem);
+ }
+ } else {
+ LOGE("Cannot get raw mem");
+ rc = UNKNOWN_ERROR;
+ }
+
+ return rc;
+}
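+
+/* Ownership sketch (restating the branches above as hypothetical caller code):
+ * when the ZSL channel owns the raw buffer, the super buffer itself travels
+ * with the notify and is released afterwards; otherwise only the heap copy
+ * obtained from mGetMemory() is released:
+ *
+ *   qcamera_release_data_t rel;
+ *   memset(&rel, 0, sizeof(rel));
+ *   if (zslChannelUsed) rel.frame = recvd_frame; // original super buf released later
+ *   else                rel.data  = raw_mem;     // heap copy released later
+ *   sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE, raw_mem, 0, NULL, &rel);
+ */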
+
+/*===========================================================================
+ * FUNCTION : dataSaveRoutine
+ *
+ * DESCRIPTION: data saving routine
+ *
+ * PARAMETERS :
+ * @data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataSaveRoutine(void *data)
+{
+ int running = 1;
+ int ret;
+ uint8_t is_active = FALSE;
+ QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+ QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
+ cmdThread->setName("CAM_JpegSave");
+ char saveName[PROPERTY_VALUE_MAX];
+
+ LOGH("E");
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_START_DATA_PROC:
+ LOGH("start data proc");
+ is_active = TRUE;
+ pme->m_inputSaveQ.init();
+ break;
+ case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+ {
+ LOGH("stop data proc");
+ is_active = FALSE;
+
+ // flush input save Queue
+ pme->m_inputSaveQ.flush();
+
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+ }
+ break;
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ LOGH("Do next job, active is %d", is_active);
+
+ qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
+ if (job_data == NULL) {
+ LOGE("Invalid jpeg event data");
+ continue;
+ }
+ //qcamera_jpeg_data_t *jpeg_job =
+ // (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue(false);
+ //uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;
+ uint32_t frame_idx = 75;
+
+ pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
+
+ LOGH("[KPI Perf] : jpeg job %d", job_data->jobId);
+
+ if (is_active == TRUE) {
+ memset(saveName, '\0', sizeof(saveName));
+ snprintf(saveName,
+ sizeof(saveName),
+ QCameraPostProcessor::STORE_LOCATION,
+ pme->mSaveFrmCnt);
+
+ int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
+ if (file_fd >= 0) {
+ ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
+ job_data->out_data.buf_filled_len);
+ if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
+ LOGE("Failed save complete data %d bytes "
+ "written instead of %d bytes!",
+ written_len,
+ job_data->out_data.buf_filled_len);
+ } else {
+ LOGH("written number of bytes %d\n",
+ written_len);
+ }
+
+ close(file_fd);
+ } else {
+ LOGE("fail t open file for saving");
+ }
+ pme->mSaveFrmCnt++;
+
+ camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
+ strlen(saveName),
+ 1,
+ pme->m_parent->mCallbackCookie);
+ if (NULL == jpeg_mem) {
+ ret = NO_MEMORY;
+ LOGE("getMemory for jpeg, ret = NO_MEMORY");
+ goto end;
+ }
+ memcpy(jpeg_mem->data, saveName, strlen(saveName));
+
+ LOGH("Calling upperlayer callback to store JPEG image");
+ qcamera_release_data_t release_data;
+ memset(&release_data, 0, sizeof(qcamera_release_data_t));
+ release_data.data = jpeg_mem;
+ release_data.unlinkFile = true;
+ LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
+ ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+ jpeg_mem,
+ 0,
+ NULL,
+ &release_data,
+ frame_idx);
+ }
+
+end:
+ free(job_data);
+ }
+ break;
+ case CAMERA_CMD_TYPE_EXIT:
+ LOGH("save thread exit");
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ } while (running);
+ LOGH("X");
+ return NULL;
+}
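+
+/* Feed-path sketch (hypothetical producer side, mirroring the DO_NEXT_JOB
+ * handler above): finished jpeg payloads reach this thread through the save
+ * queue followed by a wake-up command:
+ *
+ *   pme->m_inputSaveQ.enqueue((void *)payload); // qcamera_jpeg_evt_payload_t *
+ *   pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ */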
+
+/*===========================================================================
+ * FUNCTION : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ * Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ * reprocess.
+ *
+ * PARAMETERS :
+ * @data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataProcessRoutine(void *data)
+{
+ int running = 1;
+ int ret;
+ uint8_t is_active = FALSE;
+ QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+ QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+ cmdThread->setName("CAM_DataProc");
+
+ LOGH("E");
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_START_DATA_PROC:
+ LOGH("start data proc");
+ is_active = TRUE;
+
+ pme->m_ongoingPPQ.init();
+ pme->m_inputJpegQ.init();
+ pme->m_inputPPQ.init();
+ pme->m_inputRawQ.init();
+
+ pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
+ FALSE,
+ FALSE);
+
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+
+ break;
+ case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+ {
+ LOGH("stop data proc");
+ is_active = FALSE;
+
+ pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
+ TRUE,
+ TRUE);
+ // cancel all ongoing jpeg jobs
+ qcamera_jpeg_data_t *jpeg_job =
+ (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+ while (jpeg_job != NULL) {
+ pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+ pme->releaseJpegJobData(jpeg_job);
+ free(jpeg_job);
+
+ jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+ }
+
+ // destroy jpeg encoding session
+ if ( 0 < pme->mJpegSessionId ) {
+ pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+ pme->mJpegSessionId = 0;
+ }
+
+ // free jpeg out buf and exif obj
+ FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
+ pme->m_JpegOutputMemCount);
+
+ if (pme->m_pJpegExifObj != NULL) {
+ delete pme->m_pJpegExifObj;
+ pme->m_pJpegExifObj = NULL;
+ }
+
+ // flush ongoing postproc Queue
+ pme->m_ongoingPPQ.flush();
+
+ // flush input jpeg Queue
+ pme->m_inputJpegQ.flush();
+
+ // flush input Postproc Queue
+ pme->m_inputPPQ.flush();
+
+ // flush input raw Queue
+ pme->m_inputRawQ.flush();
+
+ // signal cmd is completed
+ cam_sem_post(&cmdThread->sync_sem);
+
+ pme->mNewJpegSessionNeeded = true;
+ }
+ break;
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ LOGH("Do next job, active is %d", is_active);
+ if (is_active == TRUE) {
+ qcamera_jpeg_data_t *jpeg_job =
+ (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+ if (NULL != jpeg_job) {
+ // To avoid any race conditions,
+ // sync any stream specific parameters here.
+ if (pme->m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+ // Sync stream params, only if advanced features configured
+ // Reduces the latency for normal snapshot.
+ pme->syncStreamParams(jpeg_job->src_frame, NULL);
+ }
+
+ // add into ongoing jpeg job Q
+ if (pme->m_ongoingJpegQ.enqueue((void *)jpeg_job)) {
+ ret = pme->encodeData(jpeg_job,
+ pme->mNewJpegSessionNeeded);
+ if (NO_ERROR != ret) {
+ // dequeue the last one
+ pme->m_ongoingJpegQ.dequeue(false);
+ pme->releaseJpegJobData(jpeg_job);
+ free(jpeg_job);
+ jpeg_job = NULL;
+ pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ }
+ } else {
+ LOGW("m_ongoingJpegQ is not active!!!");
+ pme->releaseJpegJobData(jpeg_job);
+ free(jpeg_job);
+ jpeg_job = NULL;
+ }
+ }
+
+
+ // process raw data if any
+ mm_camera_super_buf_t *super_buf =
+ (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+
+ if (NULL != super_buf) {
+ //play shutter sound
+ pme->m_parent->playShutter();
+ ret = pme->processRawImageImpl(super_buf);
+ if (NO_ERROR != ret) {
+ pme->releaseSuperBuf(super_buf);
+ free(super_buf);
+ pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ }
+ }
+
+ ret = pme->doReprocess();
+ if (NO_ERROR != ret) {
+ pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+ } else {
+ ret = pme->stopCapture();
+ }
+
+ } else {
+ // not active, simply return buf and do no op
+ qcamera_jpeg_data_t *jpeg_data =
+ (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+ if (NULL != jpeg_data) {
+ pme->releaseJpegJobData(jpeg_data);
+ free(jpeg_data);
+ }
+ mm_camera_super_buf_t *super_buf =
+ (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+ if (NULL != super_buf) {
+ pme->releaseSuperBuf(super_buf);
+ free(super_buf);
+ }
+
+ // flush input Postproc Queue
+ pme->m_inputPPQ.flush();
+ }
+ }
+ break;
+ case CAMERA_CMD_TYPE_EXIT:
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ } while (running);
+ LOGH("X");
+ return NULL;
+}
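+
+/* Command-flow sketch (hypothetical producer side, using the QCameraCmdThread
+ * calls already exercised in this file): work reaches this routine by
+ * enqueueing into one of the input queues and waking the thread:
+ *
+ *   pme->m_inputJpegQ.enqueue((void *)jpeg_job);
+ *   pme->m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ *   ...
+ *   pme->m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE); // synchronous stop
+ */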
+
+/*===========================================================================
+ * FUNCTION : doReprocess
+ *
+ * DESCRIPTION: Trigger channel reprocessing
+ *
+ * PARAMETERS :None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::doReprocess()
+{
+ int32_t ret = NO_ERROR;
+ QCameraChannel *m_pSrcChannel = NULL;
+ QCameraStream *pMetaStream = NULL;
+ uint8_t meta_buf_index = 0;
+ mm_camera_buf_def_t *meta_buf = NULL;
+ mm_camera_super_buf_t *ppInputFrame = NULL;
+
+ qcamera_pp_data_t *ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.peek();
+ if ((ppreq_job == NULL) || (ppreq_job->src_frame == NULL)) {
+ return ret;
+ }
+
+ if (!validatePostProcess(ppreq_job->src_frame)) {
+ return ret;
+ }
+
+ ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.dequeue();
+ if (ppreq_job == NULL || ppreq_job->src_frame == NULL ||
+ ppreq_job->src_reproc_frame == NULL) {
+ return ret;
+ }
+
+ mm_camera_super_buf_t *src_frame = ppreq_job->src_frame;
+ mm_camera_super_buf_t *src_reproc_frame = ppreq_job->src_reproc_frame;
+ int8_t mCurReprocCount = ppreq_job->reprocCount;
+ int8_t mCurChannelIdx = ppreq_job->ppChannelIndex;
+
+ LOGD("frame = %p src_frame = %p mCurReprocCount = %d mCurChannelIdx = %d",
+ src_frame,src_reproc_frame,mCurReprocCount, mCurChannelIdx);
+
+ if ((m_parent->mParameters.getManualCaptureMode() >=
+ CAM_MANUAL_CAPTURE_TYPE_3) && (mCurChannelIdx == 0)) {
+ ppInputFrame = src_reproc_frame;
+ } else {
+ ppInputFrame = src_frame;
+ }
+
+ if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
+ LOGE("invalid channel count");
+ return UNKNOWN_ERROR;
+ }
+
+ // find meta data stream and index of meta data frame in the superbuf
+ for (int8_t j = 0; j < mPPChannelCount; j++) {
+ /*First search in src buffer for any offline metadata */
+ for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
+ QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
+ src_frame->bufs[i]->stream_id);
+ if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ meta_buf_index = (uint8_t) src_frame->bufs[i]->buf_idx;
+ pMetaStream = pStream;
+ meta_buf = src_frame->bufs[i];
+ break;
+ }
+ }
+
+ if ((pMetaStream != NULL) && (meta_buf != NULL)) {
+ LOGD("Found Offline stream metadata = %d",
+ (int)meta_buf_index);
+ break;
+ }
+ }
+
+ if ((pMetaStream == NULL) && (meta_buf == NULL)) {
+ for (int8_t j = 0; j < mPPChannelCount; j++) {
+ m_pSrcChannel = mPPChannels[j]->getSrcChannel();
+ if (m_pSrcChannel == NULL)
+ continue;
+ for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
+ QCameraStream *pStream =
+ m_pSrcChannel->getStreamByHandle(
+ src_reproc_frame->bufs[i]->stream_id);
+ if (pStream != NULL && pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+ meta_buf_index = (uint8_t) src_reproc_frame->bufs[i]->buf_idx;
+ pMetaStream = pStream;
+ meta_buf = src_reproc_frame->bufs[i];
+ break;
+ }
+ }
+ if ((pMetaStream != NULL) && (meta_buf != NULL)) {
+ LOGD("Found Meta data info for reprocessing index = %d",
+ (int)meta_buf_index);
+ break;
+ }
+ }
+ }
+
+ if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+ // Sync stream params only when advanced features are configured;
+ // skipping this otherwise reduces the latency for a normal snapshot.
+ syncStreamParams(src_frame, src_reproc_frame);
+ }
+ if (mPPChannels[mCurChannelIdx] != NULL) {
+ // add into ongoing PP job Q
+ ppreq_job->reprocCount = (int8_t) (mCurReprocCount + 1);
+
+ if ((m_parent->isRegularCapture()) || (ppreq_job->offline_buffer)) {
+ m_bufCountPPQ++;
+ if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
+ pthread_mutex_lock(&m_reprocess_lock);
+ ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
+ meta_buf, m_parent->mParameters);
+ if (ret != NO_ERROR) {
+ pthread_mutex_unlock(&m_reprocess_lock);
+ goto end;
+ }
+
+ if ((ppreq_job->offline_buffer) &&
+ (ppreq_job->offline_reproc_buf)) {
+ mPPChannels[mCurChannelIdx]->doReprocessOffline(
+ ppreq_job->offline_reproc_buf, meta_buf);
+ }
+ pthread_mutex_unlock(&m_reprocess_lock);
+ } else {
+ LOGW("m_ongoingPPQ is not active!!!");
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ } else {
+ m_bufCountPPQ++;
+ if (!m_ongoingPPQ.enqueue((void *)ppreq_job)) {
+ LOGW("m_ongoingJpegQ is not active!!!");
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+
+ int32_t numRequiredPPQBufsForSingleOutput = (int32_t)
+ m_parent->mParameters.getNumberInBufsForSingleShot();
+
+ if (m_bufCountPPQ % numRequiredPPQBufsForSingleOutput == 0) {
+ int32_t extra_pp_job_count =
+ m_parent->mParameters.getNumberOutBufsForSingleShot() -
+ m_parent->mParameters.getNumberInBufsForSingleShot();
+
+ for (int32_t i = 0; i < extra_pp_job_count; i++) {
+ qcamera_pp_data_t *extra_pp_job =
+ (qcamera_pp_data_t *)calloc(1, sizeof(qcamera_pp_data_t));
+ if (!extra_pp_job) {
+ LOGE("no mem for qcamera_pp_data_t");
+ ret = NO_MEMORY;
+ break;
+ }
+ extra_pp_job->reprocCount = ppreq_job->reprocCount;
+ if (!m_ongoingPPQ.enqueue((void *)extra_pp_job)) {
+ LOGW("m_ongoingJpegQ is not active!!!");
+ releaseOngoingPPData(extra_pp_job, this);
+ free(extra_pp_job);
+ extra_pp_job = NULL;
+ goto end;
+ }
+ }
+ }
+
+ ret = mPPChannels[mCurChannelIdx]->doReprocess(ppInputFrame,
+ m_parent->mParameters, pMetaStream, meta_buf_index);
+ }
+ } else {
+ LOGE("Reprocess channel is NULL");
+ ret = UNKNOWN_ERROR;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ releaseOngoingPPData(ppreq_job, this);
+ if (ppreq_job != NULL) {
+ free(ppreq_job);
+ ppreq_job = NULL;
+ }
+ }
+ return ret;
+}
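+
+/* Bookkeeping sketch (hypothetical parameter values, illustrating the
+ * extra-job logic above):
+ *
+ *   int32_t in  = m_parent->mParameters.getNumberInBufsForSingleShot();  // say 3
+ *   int32_t out = m_parent->mParameters.getNumberOutBufsForSingleShot(); // say 4
+ *   // after every 3rd enqueue (m_bufCountPPQ % in == 0) the loop above adds
+ *   // out - in == 1 placeholder qcamera_pp_data_t to m_ongoingPPQ, so the
+ *   // reprocess channel can return 4 output frames for the 3 inputs.
+ */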
+
+/*===========================================================================
+ * FUNCTION : getReprocChannel
+ *
+ * DESCRIPTION: Returns reprocessing channel handle
+ *
+ * PARAMETERS : index for reprocessing array
+ *
+ * RETURN : QCameraReprocessChannel * type of pointer
+ * NULL if no reprocessing channel
+ *==========================================================================*/
+QCameraReprocessChannel * QCameraPostProcessor::getReprocChannel(uint8_t index)
+{
+ if (index >= mPPChannelCount) {
+ LOGE("Invalid index value");
+ return NULL;
+ }
+ return mPPChannels[index];
+}
+
+/*===========================================================================
+ * FUNCTION : stopCapture
+ *
+ * DESCRIPTION: Trigger image capture stop
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stopCapture()
+{
+ int rc = NO_ERROR;
+
+ if (m_parent->isRegularCapture()) {
+ rc = m_parent->processAPI(
+ QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegPaddingReq
+ *
+ * DESCRIPTION: function to get the padding requirements for JPEG encoding
+ *
+ * PARAMETERS :
+ * @padding_info : jpeg specific padding requirement
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
+{
+ // TODO: hardcoded for now, needs to be queried from mm-jpeg-interface
+ padding_info.width_padding = CAM_PAD_NONE;
+ padding_info.height_padding = CAM_PAD_TO_16;
+ padding_info.plane_padding = CAM_PAD_TO_WORD;
+ padding_info.offset_info.offset_x = 0;
+ padding_info.offset_info.offset_y = 0;
+ return NO_ERROR;
+}
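+
+/* Worked example (hypothetical 1080p snapshot) of how these requirements are
+ * intended to be applied by the buffer layout code: CAM_PAD_TO_16 on height
+ * rounds 1080 rows up to 1088, CAM_PAD_NONE leaves the 1920-pixel width
+ * untouched, and plane lengths are then padded to a word boundary. */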
+
+/*===========================================================================
+ * FUNCTION : setYUVFrameInfo
+ *
+ * DESCRIPTION: set raw YUV frame data info for the upper layer
+ *
+ * PARAMETERS :
+ * @frame : process frame received from mm-camera-interface
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *
+ * NOTE : currently we return frame len, y offset, cbcr offset and frame format
+ *==========================================================================*/
+int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
+{
+ QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+ // check reprocess channel if not found
+ if (pChannel == NULL) {
+ for (int8_t i = 0; i < mPPChannelCount; i++) {
+ if ((mPPChannels[i] != NULL) &&
+ (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+ pChannel = mPPChannels[i];
+ break;
+ }
+ }
+ }
+
+ if (pChannel == NULL) {
+ LOGE("No corresponding channel (ch_id = %d) exist, return here",
+ recvd_frame->ch_id);
+ return BAD_VALUE;
+ }
+
+ // find snapshot frame
+ for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+ QCameraStream *pStream =
+ pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+ if (pStream != NULL) {
+ if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+ pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+ //get the main frame, use stream info
+ cam_frame_len_offset_t frame_offset;
+ cam_dimension_t frame_dim;
+ cam_format_t frame_fmt;
+ const char *fmt_string;
+ pStream->getFrameDimension(frame_dim);
+ pStream->getFrameOffset(frame_offset);
+ pStream->getFormat(frame_fmt);
+ fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
+
+ int cbcr_offset = (int32_t)frame_offset.mp[0].len -
+ frame_dim.width * frame_dim.height;
+
+ LOGH("frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
+ frame_dim.width, frame_dim.height, frame_offset.mp[0].offset, cbcr_offset, fmt_string);
+ return NO_ERROR;
+ }
+ }
+ }
+
+ return BAD_VALUE;
+}
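+
+/* Worked example (hypothetical NV21 1920x1080 frame): if the padded luma plane
+ * length frame_offset.mp[0].len is 2088960 bytes (1920 x 1088 rows), then
+ * cbcr_offset = 2088960 - 1920 * 1080 = 15360, i.e. the value logged as
+ * cbcroff is the gap between the nominal luma data and the next plane. */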
+
+bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
+{
+ qcamera_jpeg_data_t * job = (qcamera_jpeg_data_t *) data;
+ uint32_t job_id = *((uint32_t *) match_data);
+ return job->jobId == job_id;
+}
+
+/*===========================================================================
+ * FUNCTION : getJpegMemory
+ *
+ * DESCRIPTION: buffer allocation function
+ * to pass to jpeg interface
+ *
+ * PARAMETERS :
+ * @out_buf : buffer descriptor struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
+{
+ LOGH("Allocating jpeg out buffer of size: %d", out_buf->size);
+ QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
+ camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(out_buf->fd, out_buf->size, 1U,
+ procInst->m_parent->mCallbackCookie);
+ out_buf->mem_hdl = cam_mem;
+ out_buf->vaddr = cam_mem->data;
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseJpegMemory
+ *
+ * DESCRIPTION: release jpeg memory function
+ * to pass to jpeg interface, in case of abort
+ *
+ * PARAMETERS :
+ * @out_buf : buffer descriptor struct
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int QCameraPostProcessor::releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
+{
+ if (out_buf && out_buf->mem_hdl) {
+ LOGD("releasing jpeg out buffer of size: %d", out_buf->size);
+ camera_memory_t *cam_mem = (camera_memory_t*)out_buf->mem_hdl;
+ cam_mem->release(cam_mem);
+ out_buf->mem_hdl = NULL;
+ out_buf->vaddr = NULL;
+ return NO_ERROR;
+ }
+ return -1;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraExif
+ *
+ * DESCRIPTION: constructor of QCameraExif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraExif::QCameraExif()
+ : m_nNumEntries(0)
+{
+ memset(m_Entries, 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraExif
+ *
+ * DESCRIPTION: destructor of QCameraExif. Will release internal memory ptr.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraExif::~QCameraExif()
+{
+ for (uint32_t i = 0; i < m_nNumEntries; i++) {
+ switch (m_Entries[i].tag_entry.type) {
+ case EXIF_BYTE:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._bytes != NULL) {
+ free(m_Entries[i].tag_entry.data._bytes);
+ m_Entries[i].tag_entry.data._bytes = NULL;
+ }
+ }
+ break;
+ case EXIF_ASCII:
+ {
+ if (m_Entries[i].tag_entry.data._ascii != NULL) {
+ free(m_Entries[i].tag_entry.data._ascii);
+ m_Entries[i].tag_entry.data._ascii = NULL;
+ }
+ }
+ break;
+ case EXIF_SHORT:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._shorts != NULL) {
+ free(m_Entries[i].tag_entry.data._shorts);
+ m_Entries[i].tag_entry.data._shorts = NULL;
+ }
+ }
+ break;
+ case EXIF_LONG:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._longs != NULL) {
+ free(m_Entries[i].tag_entry.data._longs);
+ m_Entries[i].tag_entry.data._longs = NULL;
+ }
+ }
+ break;
+ case EXIF_RATIONAL:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._rats != NULL) {
+ free(m_Entries[i].tag_entry.data._rats);
+ m_Entries[i].tag_entry.data._rats = NULL;
+ }
+ }
+ break;
+ case EXIF_UNDEFINED:
+ {
+ if (m_Entries[i].tag_entry.data._undefined != NULL) {
+ free(m_Entries[i].tag_entry.data._undefined);
+ m_Entries[i].tag_entry.data._undefined = NULL;
+ }
+ }
+ break;
+ case EXIF_SLONG:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._slongs != NULL) {
+ free(m_Entries[i].tag_entry.data._slongs);
+ m_Entries[i].tag_entry.data._slongs = NULL;
+ }
+ }
+ break;
+ case EXIF_SRATIONAL:
+ {
+ if (m_Entries[i].tag_entry.count > 1 &&
+ m_Entries[i].tag_entry.data._srats != NULL) {
+ free(m_Entries[i].tag_entry.data._srats);
+ m_Entries[i].tag_entry.data._srats = NULL;
+ }
+ }
+ break;
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ * @tagid : exif tag ID
+ * @type : data type
+ * @count : number of data elements in units of its type
+ * @data : input data ptr
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
+ exif_tag_type_t type,
+ uint32_t count,
+ void *data)
+{
+ int32_t rc = NO_ERROR;
+ if(m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+ LOGE("Number of entries exceeded limit");
+ return NO_MEMORY;
+ }
+
+ m_Entries[m_nNumEntries].tag_id = tagid;
+ m_Entries[m_nNumEntries].tag_entry.type = type;
+ m_Entries[m_nNumEntries].tag_entry.count = count;
+ m_Entries[m_nNumEntries].tag_entry.copy = 1;
+ switch (type) {
+ case EXIF_BYTE:
+ {
+ if (count > 1) {
+ uint8_t *values = (uint8_t *)malloc(count);
+ if (values == NULL) {
+ LOGE("No memory for byte array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, data, count);
+ m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
+ }
+ }
+ break;
+ case EXIF_ASCII:
+ {
+ char *str = NULL;
+ str = (char *)malloc(count + 1);
+ if (str == NULL) {
+ LOGE("No memory for ascii string");
+ rc = NO_MEMORY;
+ } else {
+ memset(str, 0, count + 1);
+ memcpy(str, data, count);
+ m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+ }
+ }
+ break;
+ case EXIF_SHORT:
+ {
+ uint16_t *exif_data = (uint16_t *)data;
+ if (count > 1) {
+ uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+ if (values == NULL) {
+ LOGE("No memory for short array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, exif_data, count * sizeof(uint16_t));
+ m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
+ }
+ }
+ break;
+ case EXIF_LONG:
+ {
+ uint32_t *exif_data = (uint32_t *)data;
+ if (count > 1) {
+ uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+ if (values == NULL) {
+ LOGE("No memory for long array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, exif_data, count * sizeof(uint32_t));
+ m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
+ }
+ }
+ break;
+ case EXIF_RATIONAL:
+ {
+ rat_t *exif_data = (rat_t *)data;
+ if (count > 1) {
+ rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+ if (values == NULL) {
+ LOGE("No memory for rational array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, exif_data, count * sizeof(rat_t));
+ m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
+ }
+ }
+ break;
+ case EXIF_UNDEFINED:
+ {
+ uint8_t *values = (uint8_t *)malloc(count);
+ if (values == NULL) {
+ LOGE("No memory for undefined array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, data, count);
+ m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+ }
+ }
+ break;
+ case EXIF_SLONG:
+ {
+ int32_t *exif_data = (int32_t *)data;
+ if (count > 1) {
+ int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+ if (values == NULL) {
+ LOGE("No memory for signed long array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, exif_data, count * sizeof(int32_t));
+ m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
+ }
+ }
+ break;
+ case EXIF_SRATIONAL:
+ {
+ srat_t *exif_data = (srat_t *)data;
+ if (count > 1) {
+ srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+ if (values == NULL) {
+ LOGE("No memory for signed rational array");
+ rc = NO_MEMORY;
+ } else {
+ memcpy(values, exif_data, count * sizeof(srat_t));
+ m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+ }
+ } else {
+ m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
+ }
+ }
+ break;
+ }
+
+ // Increase number of entries
+ m_nNumEntries++;
+ return rc;
+}
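+
+/* Usage sketch (hypothetical tag values; the EXIFTAGID_* constants are assumed
+ * to come from the exif headers included elsewhere, not from this file):
+ *
+ *   QCameraExif *exif = new QCameraExif();
+ *   const char make[] = "QTI";
+ *   exif->addEntry(EXIFTAGID_MAKE, EXIF_ASCII,
+ *           (uint32_t)strlen(make) + 1, (void *)make);  // string is copied internally
+ *   rat_t expTime = {1, 30};                            // 1/30 s
+ *   exif->addEntry(EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL, 1, &expTime);
+ */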
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraPostProc.h b/camera/QCamera2/HAL/QCameraPostProc.h
new file mode 100644
index 0000000..5c56214
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraPostProc.h
@@ -0,0 +1,250 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_POSTPROC_H__
+#define __QCAMERA_POSTPROC_H__
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+#define MAX_JPEG_BURST 2
+#define CAM_PP_CHANNEL_MAX 8
+
+namespace qcamera {
+
+class QCameraExif;
+class QCamera2HardwareInterface;
+
+typedef struct {
+ uint32_t jobId; // job ID
+ uint32_t client_hdl; // handle of jpeg client (obtained when open jpeg)
+ mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel
+ //after done)
+ mm_camera_super_buf_t *src_reproc_frame; // original source
+ //frame for reproc if not NULL
+ metadata_buffer_t *metadata; // source frame metadata
+ bool reproc_frame_release; // false release original buffer, true don't release it
+ mm_camera_buf_def_t *src_reproc_bufs;
+ QCameraExif *pJpegExifObj;
+ uint8_t offline_buffer;
+ mm_camera_buf_def_t *offline_reproc_buf; //HAL processed buffer
+} qcamera_jpeg_data_t;
+
+
+typedef struct {
+ int8_t reprocCount;
+ mm_camera_super_buf_t *src_frame; // source frame that needs post process
+ mm_camera_super_buf_t *src_reproc_frame;// source frame (need to be
+ //returned back to kernel after done)
+} qcamera_pp_request_t;
+
+typedef struct {
+ uint32_t jobId; // job ID
+ int8_t reprocCount; //Current pass count
+ int8_t ppChannelIndex; //Reprocess channel object index
+ mm_camera_super_buf_t *src_frame;// source frame
+ bool reproc_frame_release; // false release original buffer
+ // true don't release it
+ mm_camera_buf_def_t *src_reproc_bufs;
+ mm_camera_super_buf_t *src_reproc_frame;// source frame (need to be
+ //returned back to kernel after done)
+ uint8_t offline_buffer;
+ mm_camera_buf_def_t *offline_reproc_buf; //HAL processed buffer
+} qcamera_pp_data_t;
+
+typedef struct {
+ uint32_t jobId; // job ID (obtained when start_jpeg_job)
+ jpeg_job_status_t status; // jpeg encoding status
+ mm_jpeg_output_t out_data; // ptr to jpeg output buf
+} qcamera_jpeg_evt_payload_t;
+
+typedef struct {
+ camera_memory_t * data; // ptr to data memory struct
+ mm_camera_super_buf_t * frame; // ptr to frame
+ QCameraMemory * streamBufs; //ptr to stream buffers
+ bool unlinkFile; // unlink any stored buffers on error
+} qcamera_release_data_t;
+
+typedef struct {
+ int32_t msg_type; // msg type of data notify
+ camera_memory_t * data; // ptr to data memory struct
+ unsigned int index; // index of the buf in the whole buffer
+ camera_frame_metadata_t *metadata; // ptr to meta data
+ qcamera_release_data_t release_data; // any data needs to be release after notify
+} qcamera_data_argm_t;
+
+#define MAX_EXIF_TABLE_ENTRIES 17
+class QCameraExif
+{
+public:
+ QCameraExif();
+ virtual ~QCameraExif();
+
+ int32_t addEntry(exif_tag_id_t tagid,
+ exif_tag_type_t type,
+ uint32_t count,
+ void *data);
+ uint32_t getNumOfEntries() {return m_nNumEntries;};
+ QEXIF_INFO_DATA *getEntries() {return m_Entries;};
+
+private:
+ QEXIF_INFO_DATA m_Entries[MAX_EXIF_TABLE_ENTRIES]; // exif tags for JPEG encoder
+ uint32_t m_nNumEntries; // number of valid entries
+};
+
+class QCameraPostProcessor
+{
+public:
+ QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl);
+ virtual ~QCameraPostProcessor();
+
+ int32_t init(jpeg_encode_callback_t jpeg_cb, void *user_data);
+ int32_t deinit();
+ int32_t start(QCameraChannel *pSrcChannel);
+ int32_t stop();
+ bool validatePostProcess(mm_camera_super_buf_t *frame);
+ int32_t processData(mm_camera_super_buf_t *frame);
+ int32_t processRawData(mm_camera_super_buf_t *frame);
+ int32_t processPPData(mm_camera_super_buf_t *frame);
+ int32_t processJpegEvt(qcamera_jpeg_evt_payload_t *evt);
+ int32_t getJpegPaddingReq(cam_padding_info_t &padding_info);
+ QCameraReprocessChannel * getReprocChannel(uint8_t index);
+ inline bool getJpegMemOpt() {return mJpegMemOpt;}
+ inline void setJpegMemOpt(bool val) {mJpegMemOpt = val;}
+ int32_t setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
+ mm_jpeg_mpo_ops_t* pJpegMpoHandle, uint32_t clientHandle);
+ int32_t createJpegSession(QCameraChannel *pSrcChannel);
+
+ int8_t getPPChannelCount() {return mPPChannelCount;};
+ mm_camera_buf_def_t *getOfflinePPInputBuffer(
+ mm_camera_super_buf_t *src_frame);
+ QCameraMemory *mOfflineDataBufs;
+
+private:
+ int32_t sendDataNotify(int32_t msg_type,
+ camera_memory_t *data,
+ uint8_t index,
+ camera_frame_metadata_t *metadata,
+ qcamera_release_data_t *release_data,
+ uint32_t super_buf_frame_idx = 0);
+ int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+ qcamera_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+ mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+ mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+ int32_t getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+ QCameraStream *main_stream,
+ QCameraStream *thumb_stream);
+ int32_t encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+ uint8_t &needNewSess);
+ int32_t queryStreams(QCameraStream **main,
+ QCameraStream **thumb,
+ QCameraStream **reproc,
+ mm_camera_buf_def_t **main_image,
+ mm_camera_buf_def_t **thumb_image,
+ mm_camera_super_buf_t *main_frame,
+ mm_camera_super_buf_t *reproc_frame);
+ int32_t syncStreamParams(mm_camera_super_buf_t *frame,
+ mm_camera_super_buf_t *reproc_frame);
+ void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+ void releaseSuperBuf(mm_camera_super_buf_t *super_buf,
+ cam_stream_type_t stream_type);
+ static void releaseNotifyData(void *user_data,
+ void *cookie,
+ int32_t cb_status);
+ void releaseJpegJobData(qcamera_jpeg_data_t *job);
+ static void releaseSaveJobData(void *data, void *user_data);
+ static void releaseRawData(void *data, void *user_data);
+ int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+ static void releaseJpegData(void *data, void *user_data);
+ static void releasePPInputData(void *data, void *user_data);
+ static void releaseOngoingPPData(void *data, void *user_data);
+
+ static void *dataProcessRoutine(void *data);
+ static void *dataSaveRoutine(void *data);
+
+ int32_t setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame);
+ static bool matchJobId(void *data, void *user_data, void *match_data);
+ static int getJpegMemory(omx_jpeg_ouput_buf_t *out_buf);
+ static int releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf);
+
+ int32_t doReprocess();
+ int32_t stopCapture();
+private:
+ QCamera2HardwareInterface *m_parent;
+ jpeg_encode_callback_t mJpegCB;
+ void * mJpegUserData;
+ mm_jpeg_ops_t mJpegHandle;
+ mm_jpeg_mpo_ops_t mJpegMpoHandle; // handle for mpo composition for dualcam
+ uint32_t mJpegClientHandle;
+ uint32_t mJpegSessionId;
+
+ void * m_pJpegOutputMem[MM_JPEG_MAX_BUF];
+ QCameraExif * m_pJpegExifObj;
+ uint32_t m_bThumbnailNeeded;
+
+ int8_t mPPChannelCount;
+ QCameraReprocessChannel *mPPChannels[CAM_PP_CHANNEL_MAX];
+
+ camera_memory_t * m_DataMem; // save frame mem pointer
+
+ int8_t m_bInited; // if postproc is inited
+
+ QCameraQueue m_inputPPQ; // input queue for postproc
+ QCameraQueue m_ongoingPPQ; // ongoing postproc queue
+ QCameraQueue m_inputJpegQ; // input jpeg job queue
+ QCameraQueue m_ongoingJpegQ; // ongoing jpeg job queue
+ QCameraQueue m_inputRawQ; // input raw job queue
+ QCameraQueue m_inputSaveQ; // input save job queue
+ QCameraCmdThread m_dataProcTh; // thread for data processing
+ QCameraCmdThread m_saveProcTh; // thread for storing buffers
+ uint32_t mSaveFrmCnt; // save frame counter
+ static const char *STORE_LOCATION; // path for storing buffers
+ bool mUseSaveProc; // use store thread
+ bool mUseJpegBurst; // use jpeg burst encoding mode
+ bool mJpegMemOpt;
+ uint32_t m_JpegOutputMemCount;
+ uint8_t mNewJpegSessionNeeded;
+ int32_t m_bufCountPPQ;
+ Vector<mm_camera_buf_def_t *> m_InputMetadata; // store input metadata buffers for AOST cases
+ size_t m_PPindex; // counter for each incoming AOST buffer
+ pthread_mutex_t m_reprocess_lock; // lock to ensure reprocess job is not freed early.
+
+public:
+ cam_dimension_t m_dst_dim;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_POSTPROC_H__ */
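
Taken together, the declarations above suggest the following post-processor lifecycle. This is only a caller-side sketch, assuming the HWI object, source channel and received super buffer are already set up elsewhere, with error handling reduced to early returns:

    static int32_t runPostProcOnce(QCamera2HardwareInterface *hwi,
                                   jpeg_encode_callback_t jpegCb,
                                   QCameraChannel *srcChannel,
                                   mm_camera_super_buf_t *frame)
    {
        QCameraPostProcessor pp(hwi);

        // Register the jpeg encode callback and its user data
        // (the HWI object here, by assumption).
        int32_t rc = pp.init(jpegCb, hwi);
        if (rc != NO_ERROR) {
            return rc;
        }

        // Start post processing for frames produced by srcChannel.
        rc = pp.start(srcChannel);
        if (rc == NO_ERROR) {
            // Queue one received super buffer for reprocess / jpeg encoding.
            rc = pp.processData(frame);
            pp.stop();
        }

        pp.deinit();
        return rc;
    }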
diff --git a/camera/QCamera2/HAL/QCameraStateMachine.cpp b/camera/QCamera2/HAL/QCameraStateMachine.cpp
new file mode 100644
index 0000000..f9e85b8
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStateMachine.cpp
@@ -0,0 +1,3867 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStateMachine"
+
+// System dependencies
+#include <utils/Errors.h>
+#include <stdio.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraStateMachine.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION : smEvtProcRoutine
+ *
+ * DESCRIPTION: State machine process thread routine to handle events
+ * in different states.
+ *
+ * PARAMETERS :
+ * @data : ptr to QCameraStateMachine object
+ *
+ * RETURN : none
+ *==========================================================================*/
+void *QCameraStateMachine::smEvtProcRoutine(void *data)
+{
+ int running = 1, ret;
+ QCameraStateMachine *pme = (QCameraStateMachine *)data;
+
+ LOGH("E");
+ do {
+ do {
+ ret = cam_sem_wait(&pme->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ // first check API cmd queue
+ qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
+ if (node == NULL) {
+ // no API cmd, then check evt cmd queue
+ node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
+ }
+ if (node != NULL) {
+ switch (node->cmd) {
+ case QCAMERA_SM_CMD_TYPE_API:
+ pme->stateMachine(node->evt, node->evt_payload);
+ // API calls are effectively synchronous, so evt_payload is managed by HWI
+ // no need to free payload for API
+ break;
+ case QCAMERA_SM_CMD_TYPE_EVT:
+ pme->stateMachine(node->evt, node->evt_payload);
+
+ // EVT is async call, so payload need to be free after use
+ free(node->evt_payload);
+ node->evt_payload = NULL;
+ break;
+ case QCAMERA_SM_CMD_TYPE_EXIT:
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ free(node);
+ node = NULL;
+ }
+ } while (running);
+ LOGH("X");
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraStateMachine
+ *
+ * DESCRIPTION: constructor of QCameraStateMachine. Will start process thread
+ *
+ * PARAMETERS :
+ * @ctrl : ptr to HWI object
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
+ api_queue(),
+ evt_queue()
+{
+ m_parent = ctrl;
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ cmd_pid = 0;
+ cam_sem_init(&cmd_sem, 0);
+ pthread_create(&cmd_pid,
+ NULL,
+ smEvtProcRoutine,
+ this);
+ pthread_setname_np(cmd_pid, "CAM_stMachine");
+ m_bDelayPreviewMsgs = false;
+ m_DelayedMsgs = 0;
+ m_RestoreZSL = TRUE;
+ m_bPreviewCallbackNeeded = TRUE;
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraStateMachine
+ *
+ * DESCRIPTION: destructor of QCameraStateMachine. Will stop the process thread.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+QCameraStateMachine::~QCameraStateMachine()
+{
+ cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION : releaseThread
+ *
+ * DESCRIPTION: Sends an exit command and terminates the state machine thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraStateMachine::releaseThread()
+{
+ if (cmd_pid != 0) {
+ qcamera_sm_cmd_t *node =
+ (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(qcamera_sm_cmd_t));
+ node->cmd = QCAMERA_SM_CMD_TYPE_EXIT;
+
+ if (api_queue.enqueue((void *)node)) {
+ cam_sem_post(&cmd_sem);
+ } else {
+ free(node);
+ node = NULL;
+ }
+
+ /* wait until cmd thread exits */
+ if (pthread_join(cmd_pid, NULL) != 0) {
+ LOGW("pthread dead already\n");
+ }
+ }
+ cmd_pid = 0;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : applyDelayedMsgs
+ *
+ * DESCRIPTION: Enable if needed any delayed message types
+ *
+ * PARAMETERS : None
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::applyDelayedMsgs()
+{
+ int32_t rc = NO_ERROR;
+
+ if (m_bDelayPreviewMsgs && m_DelayedMsgs) {
+ rc = m_parent->enableMsgType(m_DelayedMsgs);
+ m_bDelayPreviewMsgs = false;
+ m_DelayedMsgs = 0;
+ } else if (m_bDelayPreviewMsgs) {
+ m_bDelayPreviewMsgs = false;
+ }
+
+ return rc;
+}
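
In brief, the mechanism this helper completes works as follows (as implemented by the previewing-state handlers later in this file): while a ZSL or longshot take-picture is in flight, m_bDelayPreviewMsgs is set and requests to enable CAMERA_MSG_PREVIEW_FRAME are parked in m_DelayedMsgs instead of being applied immediately; applyDelayedMsgs() is then invoked from the preview start/stop and recording paths to re-enable the parked message types once preview frames can be delivered again.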
+
+/*===========================================================================
+ * FUNCTION : procAPI
+ *
+ * DESCRIPTION: process incoming API request from framework layer.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @api_payload : API payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
+ void *api_payload)
+{
+ qcamera_sm_cmd_t *node =
+ (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+ if (NULL == node) {
+ LOGE("No memory for qcamera_sm_cmd_t");
+ return NO_MEMORY;
+ }
+
+ memset(node, 0, sizeof(qcamera_sm_cmd_t));
+ node->cmd = QCAMERA_SM_CMD_TYPE_API;
+ node->evt = evt;
+ node->evt_payload = api_payload;
+ if (api_queue.enqueue((void *)node)) {
+ cam_sem_post(&cmd_sem);
+ return NO_ERROR;
+ } else {
+ LOGE("API enqueue failed API = %d", evt);
+ free(node);
+ return UNKNOWN_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : procEvt
+ *
+ * DESCRIPTION: process incoming event from mm-camera-interface and
+ * mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @evt_payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvt(qcamera_sm_evt_enum_t evt,
+ void *evt_payload)
+{
+ qcamera_sm_cmd_t *node =
+ (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+ if (NULL == node) {
+ LOGE("No memory for qcamera_sm_cmd_t");
+ return NO_MEMORY;
+ }
+
+ memset(node, 0, sizeof(qcamera_sm_cmd_t));
+ node->cmd = QCAMERA_SM_CMD_TYPE_EVT;
+ node->evt = evt;
+ node->evt_payload = evt_payload;
+ if (evt_queue.enqueue((void *)node)) {
+ cam_sem_post(&cmd_sem);
+ return NO_ERROR;
+ } else {
+ LOGE("EVENT enqueue failed Event = %d", evt);
+ free(node);
+ return UNKNOWN_ERROR;
+ }
+}
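
As a caller-side illustration of the two entry points above (the helper names and payloads are hypothetical; QCAMERA_SM_EVT_START_PREVIEW and QCAMERA_SM_EVT_EVT_INTERNAL are events handled later in this file):

    // API request: the payload stays owned by the HWI caller, the state
    // machine does not free it (START_PREVIEW needs no payload at all).
    static int32_t postStartPreview(QCameraStateMachine &sm)
    {
        return sm.procAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
    }

    // Async event: the payload must live on the heap, because the state
    // machine thread free()s it after stateMachine() has consumed it.
    static int32_t postInternalEvent(QCameraStateMachine &sm,
                                     const qcamera_sm_internal_evt_payload_t &src)
    {
        qcamera_sm_internal_evt_payload_t *payload =
                (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(*payload));
        if (payload == NULL) {
            return NO_MEMORY;
        }
        *payload = src;
        return sm.procEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
    }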
+
+/*===========================================================================
+ * FUNCTION : stateMachine
+ *
+ * DESCRIPTION: finite state machine entry function. Depends on state,
+ * incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
+{
+ int32_t rc = NO_ERROR;
+ LOGL("m_state %d, event (%d)", m_state, evt);
+ switch (m_state) {
+ case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+ rc = procEvtPreviewStoppedState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_PREVIEW_READY:
+ rc = procEvtPreviewReadyState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_PREVIEWING:
+ rc = procEvtPreviewingState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+ rc = procEvtPrepareSnapshotState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_PIC_TAKING:
+ rc = procEvtPicTakingState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_RECORDING:
+ rc = procEvtRecordingState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+ rc = procEvtVideoPicTakingState(evt, payload);
+ break;
+ case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+ rc = procEvtPreviewPicTakingState(evt, payload);
+ break;
+ default:
+ break;
+ }
+
+ return rc;
+}
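
A non-exhaustive summary of the transitions performed by the per-state handlers that follow; each arrow corresponds to an explicit m_state assignment in this file:

    /*
     *  PREVIEW_STOPPED  -- PREPARE_PREVIEW / START_PREVIEW (no window) -->  PREVIEW_READY
     *  PREVIEW_STOPPED  -- START_PREVIEW / START_NODISPLAY_PREVIEW     -->  PREVIEWING
     *  PREVIEW_READY    -- SET_PREVIEW_WINDOW / START_PREVIEW          -->  PREVIEWING
     *  PREVIEW_READY    -- STOP_PREVIEW                                -->  PREVIEW_STOPPED
     *  PREVIEWING       -- START_RECORDING                             -->  RECORDING
     *  PREVIEWING       -- PREPARE_SNAPSHOT                            -->  PREPARE_SNAPSHOT
     *  PREVIEWING       -- TAKE_PICTURE                                -->  PIC_TAKING or PREVIEW_PIC_TAKING
     *  PREVIEWING       -- STOP_PREVIEW                                -->  PREVIEW_STOPPED
     *  PREPARE_SNAPSHOT -- PREP_SNAPSHOT_DONE (internal event)         -->  PREVIEWING
     */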
+
+/*===========================================================================
+ * FUNCTION : procEvtPreviewStoppedState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PREVIEW_STOPPED.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+ rc = m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ {
+ m_parent->m_memoryPool.clear();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+ m_parent->setNeedRestart(false);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ char* nullParams = (char *)malloc(1);
+ if (nullParams) {
+ memset(nullParams, 0, 1);
+ }
+ result.params = nullParams;
+ } else {
+ result.params = m_parent->getParameters();
+ }
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->preparePreview();
+ }
+ if (rc == NO_ERROR) {
+ //prepare preview success, move to ready state
+ m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else if (m_parent->mPreviewWindow == NULL) {
+ rc = m_parent->preparePreview();
+ if(rc == NO_ERROR) {
+ // preview window is not set yet, move to previewReady state
+ m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+ } else {
+ LOGE("preparePreview failed");
+ }
+ } else {
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ } else {
+ // start preview success, move to previewing state
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ }
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->preparePreview();
+ }
+ if (rc == NO_ERROR) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
+ // no op needed here
+ LOGW("already in preview stopped state, do nothing");
+ result.status = NO_ERROR;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ result.status = NO_ERROR;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ rc = m_parent->release();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->dump(*((int *)payload));
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ {
+ // no op needed here
+ LOGW("No ops for evt(%d) in state(%d)", evt, m_state);
+ result.status = NO_ERROR;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->cancelAutoFocus();
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+ LOGE("Invalid internal event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ // No ops, but need to notify
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+ if (NO_ERROR != rc) {
+ LOGE("Param init deferred work failed");
+ } else {
+ rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+ }
+ break;
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtPreviewReadyState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PREVIEW_READY.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+ m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+ if (m_parent->mPreviewWindow != NULL) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ {
+ LOGD("Stopping preview...");
+ // need restart preview for parameters to take effect
+ m_parent->unpreparePreview();
+ // Clear memory pools
+ m_parent->m_memoryPool.clear();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+ // prepare preview again
+ rc = m_parent->preparePreview();
+ if (rc != NO_ERROR) {
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ }
+ m_parent->setNeedRestart(false);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ {
+ // no ops here
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ {
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ // no ops here
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ {
+ if (m_parent->mPreviewWindow != NULL) {
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ }
+ // no ops here
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
+ m_parent->unpreparePreview();
+ rc = 0;
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = 0;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+ LOGE("Invalid internal event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ // No ops, but need to notify
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+ break;
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtPreviewingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PREVIEWING.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewingState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+ // Error setting preview window during previewing
+ LOGE("Error!! cannot set preview window when preview is running");
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ int32_t enable_msgs = *((int32_t *)payload);
+ if (m_bDelayPreviewMsgs &&
+ (enable_msgs & CAMERA_MSG_PREVIEW_FRAME)) {
+ enable_msgs &= ~CAMERA_MSG_PREVIEW_FRAME;
+ m_DelayedMsgs = CAMERA_MSG_PREVIEW_FRAME;
+ }
+ rc = m_parent->enableMsgType(enable_msgs);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ int32_t disable_msgs = *((int32_t *)payload);
+ if (m_bDelayPreviewMsgs && m_DelayedMsgs) {
+ m_DelayedMsgs &= ~disable_msgs;
+ if (0 == m_DelayedMsgs) {
+ m_bDelayPreviewMsgs = false;
+ }
+ }
+ rc = m_parent->disableMsgType(disable_msgs);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int32_t msgs = *((int32_t *)payload);
+ int enabled = m_parent->msgTypeEnabled(msgs);
+ if (m_bDelayPreviewMsgs && m_DelayedMsgs) {
+ enabled |= (msgs & m_DelayedMsgs);
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ {
+ LOGD("Stopping preview...");
+ // stop preview
+ rc = m_parent->stopPreview();
+ // Clear memory pools
+ m_parent->m_memoryPool.clear();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+ // start preview again
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ }
+ if (rc != NO_ERROR) {
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ }
+ }
+ m_parent->setNeedRestart(false);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ {
+ // no ops here
+ LOGW("Already in preview ready state, no ops here");
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ {
+ // no ops here
+ LOGW("Already in previewing, no ops here to start preview");
+ applyDelayedMsgs();
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
+ rc = m_parent->stopPreview();
+ applyDelayedMsgs();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ applyDelayedMsgs();
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ {
+ rc = m_parent->preStartRecording();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_RECORDING:
+ {
+ rc = m_parent->startRecording();
+ if (rc == NO_ERROR) {
+ // move state to recording state
+ m_state = QCAMERA_SM_STATE_RECORDING;
+ applyDelayedMsgs();
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ {
+ rc = m_parent->prepareHardwareForSnapshot(FALSE);
+ if (rc == NO_ERROR) {
+ // Do not signal API result in this case.
+ // Need to wait for snapshot done in metadata.
+ m_state = QCAMERA_SM_STATE_PREPARE_SNAPSHOT;
+ applyDelayedMsgs();
+ } else {
+ // Do not change state in this case.
+ LOGE("prepareHardwareForSnapshot failed %d",
+ rc);
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ {
+ rc = m_parent->preTakePicture();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ {
+ LOGL("QCAMERA_SM_EVT_TAKE_PICTURE ");
+ if ( m_parent->mParameters.getRecordingHintValue() == true) {
+ m_parent->stopPreview();
+ m_parent->mParameters.updateRecordingHintValue(FALSE);
+ // start preview again
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ }
+ }
+ }
+ if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+ bool restartPreview = m_parent->isPreviewRestartEnabled();
+ if ((restartPreview) && (m_parent->mParameters.getManualCaptureMode()
+ >= CAM_MANUAL_CAPTURE_TYPE_3)) {
+ /* stop preview and disable ZSL now */
+ m_parent->stopPreview();
+ m_parent->mParameters.updateZSLModeValue(FALSE);
+ m_RestoreZSL = TRUE;
+ m_bDelayPreviewMsgs = true;
+ m_state = QCAMERA_SM_STATE_PIC_TAKING;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+ m_bDelayPreviewMsgs = true;
+ }
+
+ rc = m_parent->takePicture();
+ if (rc != NO_ERROR) {
+ // move state to previewing state
+ m_parent->unconfigureAdvancedCapture();
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ if (!(m_parent->isRetroPicture()) || (rc != NO_ERROR)) {
+ LOGD("signal API result, m_state = %d",
+ m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ } else {
+ m_state = QCAMERA_SM_STATE_PIC_TAKING;
+ rc = m_parent->takePicture();
+ if (rc != NO_ERROR) {
+ int32_t temp_rc = NO_ERROR;
+ // move state to preview stopped state
+ m_parent->unconfigureAdvancedCapture();
+ m_parent->stopPreview();
+ // start preview again
+ temp_rc = m_parent->preparePreview();
+ if (temp_rc == NO_ERROR) {
+ temp_rc = m_parent->startPreview();
+ if (temp_rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ }
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+ m_bPreviewNeedsRestart =
+ (QCAMERA_SM_EVT_RESTART_PERVIEW == cmd_payload->arg1);
+ m_bPreviewDelayedRestart =
+ (QCAMERA_SM_EVT_DELAYED_RESTART == cmd_payload->arg2);
+
+#ifndef VANILLA_HAL
+ if ((CAMERA_CMD_LONGSHOT_ON == cmd_payload->cmd) &&
+ (m_bPreviewNeedsRestart)) {
+ m_parent->stopPreview();
+ // Clear memory pools
+ m_parent->m_memoryPool.clear();
+
+ if (!m_bPreviewDelayedRestart) {
+ // start preview again
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ }
+ }
+ }
+ }
+#endif
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND_RESTART:
+ {
+#ifndef VANILLA_HAL
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ if ((CAMERA_CMD_LONGSHOT_ON == cmd_payload->cmd) &&
+ (m_bPreviewNeedsRestart) &&
+ (m_bPreviewDelayedRestart)) {
+ // start preview again
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ }
+ }
+ }
+#endif
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ rc = m_parent->processHistogramStats(internal_evt->stats_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ LOGE("Invalid internal event %d in state(%d)",
+ internal_evt->evt_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+ LOGW("no handling for server evt (%d) at this state",
+ cam_evt->server_event_type);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ // No ops, but need to notify
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ {
+ m_parent->stopPreview();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ {
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ rc = m_parent->startPreview();
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtPrepareSnapshotState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PREPARE_SNAPSHOT.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ case QCAMERA_SM_EVT_DUMP:
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+ result.status = NO_ERROR;
+ result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ rc = m_parent->processHistogramStats(internal_evt->stats_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ LOGE("Invalid internal event %d in state(%d)",
+ internal_evt->evt_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+                    // Unblock callers waiting indefinitely on the
+                    // prepare-snapshot and take-picture results before
+                    // reporting the server death.
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+                LOGE("Invalid camera server event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ // No ops, but need to notify
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PIC_TAKING.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPicTakingState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+ // Error setting preview window during previewing
+ LOGE("Error!! cannot set preview window when preview is running");
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ {
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+ m_parent->setNeedRestart(false);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
+ // cancel picture first
+ rc = m_parent->cancelPicture();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+#ifndef VANILLA_HAL
+ if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+ // move state to previewing state
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+#endif
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ {
+ rc = m_parent->cancelPicture();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ {
+ if ( m_parent->isLongshotEnabled() ) {
+                // no ops here, need to signal NO_ERROR
+ rc = NO_ERROR;
+ } else {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ {
+ if ( m_parent->isLongshotEnabled() ) {
+ rc = m_parent->longShot();
+ } else {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+                    // Unblock callers waiting indefinitely on the
+                    // prepare-snapshot and take-picture results before
+                    // reporting the server death.
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ case CAM_EVENT_TYPE_CAC_DONE:
+ if (m_parent->isCACEnabled() || m_parent->mParameters.isOEMFeatEnabled()) {
+ LOGD("[LONG_SHOT_DBG] : Received CAC Done");
+ if (m_parent->isLongshotEnabled()
+ && !m_parent->isCaptureShutterEnabled()) {
+ // play shutter sound for longshot
+ // after CAC stage is done
+ m_parent->playShutter();
+ }
+ m_parent->mCACDoneReceived = TRUE;
+ }
+ break;
+ default:
+                LOGH("no handling for server evt (%d) in this state",
+ cam_evt->server_event_type);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ {
+ qcamera_jpeg_evt_payload_t *jpeg_job =
+ (qcamera_jpeg_evt_payload_t *)payload;
+ rc = m_parent->processJpegNotify(jpeg_job);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL:
+ {
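+            // Tear down the capture channel; if preview restart is enabled,
+            // bring preview back up right away so the viewfinder resumes.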
+ bool restartPreview = m_parent->isPreviewRestartEnabled();
+ rc = m_parent->stopCaptureChannel(restartPreview);
+
+ if (restartPreview && (NO_ERROR == rc)) {
+ rc = m_parent->preparePreview();
+ if (NO_ERROR == rc) {
+ m_parent->m_bPreviewStarted = true;
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ }
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ rc = m_parent->cancelPicture();
+
+ bool restartPreview = m_parent->isPreviewRestartEnabled();
+ if (restartPreview) {
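+                // For manual capture mode 3 and above, restore the ZSL mode
+                // value saved when the capture was started before restarting
+                // preview.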
+ if (m_parent->mParameters.getManualCaptureMode()
+ >= CAM_MANUAL_CAPTURE_TYPE_3) {
+ m_parent->mParameters.updateZSLModeValue(m_RestoreZSL);
+ m_RestoreZSL = FALSE;
+ rc = m_parent->preparePreview();
+ if (NO_ERROR == rc) {
+ m_parent->m_bPreviewStarted = true;
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ }
+ }
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ } else {
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtRecordingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_RECORDING.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtRecordingState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+            // Workaround: the CTS VideoSnapshot test tries to start
+            // preview during video recording; treat it as a no-op success.
+ LOGH("CTS video restart op");
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ if (rc == NO_ERROR) {
+ if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    LOGE("Error!! cannot set parameters that require a restart during recording");
+ rc = BAD_VALUE;
+ }
+ }
+ if (rc != NO_ERROR) {
+ m_parent->setNeedRestart(false);
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ {
+ // No ops here, send NO_ERROR.
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ {
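+            // Live snapshot while recording: move to VIDEO_PIC_TAKING up
+            // front so concurrent events are routed to the new state, and
+            // roll back to RECORDING if the snapshot cannot be started.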
+ m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+ rc = m_parent->takeLiveSnapshot();
+ if (rc != NO_ERROR) {
+ m_parent->unconfigureAdvancedCapture();
+ m_state = QCAMERA_SM_STATE_RECORDING;
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ {
+ // no ops here
+ LOGW("already in recording state, no ops for start_recording");
+ rc = 0;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ {
+ rc = m_parent->stopRecording();
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
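+            // Stopping preview while recording implicitly stops recording
+            // first, passing through PREVIEWING before settling in
+            // PREVIEW_STOPPED.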
+ rc = m_parent->stopRecording();
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+ rc = m_parent->stopPreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ {
+ rc = m_parent->releaseRecordingFrame((const void *)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ {
+            // In video snapshot mode, preparing the hardware is a no-op.
+ result.status = NO_ERROR;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ rc = m_parent->processHistogramStats(internal_evt->stats_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+                LOGE("Invalid camera server event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ // No ops, but need to notify
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtVideoPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_VIDEO_PIC_TAKING.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ {
+ // Error setting preview window during previewing
+ LOGE("Error!! cannot set preview window when preview is running");
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ if (rc == NO_ERROR) {
+ if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    LOGE("Error!! cannot set parameters that require a restart during recording");
+ rc = BAD_VALUE;
+ }
+ }
+ if (rc != NO_ERROR) {
+ m_parent->setNeedRestart(false);
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ {
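+            // Cancel the in-flight live snapshot before stopping recording,
+            // so the state machine unwinds through RECORDING and lands in
+            // PREVIEWING.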
+ rc = m_parent->cancelLiveSnapshot();
+ m_state = QCAMERA_SM_STATE_RECORDING;
+
+ rc = m_parent->stopRecording();
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ {
+ rc = m_parent->releaseRecordingFrame((const void *)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ {
+ rc = m_parent->cancelLiveSnapshot();
+ m_state = QCAMERA_SM_STATE_RECORDING;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
+ rc = m_parent->cancelLiveSnapshot();
+ m_state = QCAMERA_SM_STATE_RECORDING;
+
+ rc = m_parent->stopRecording();
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+ rc = m_parent->stopPreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_RECORDING:
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ rc = m_parent->processHistogramStats(internal_evt->stats_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ default:
+                LOGE("Invalid camera server event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ {
+ qcamera_jpeg_evt_payload_t *jpeg_job =
+ (qcamera_jpeg_evt_payload_t *)payload;
+ rc = m_parent->processJpegNotify(jpeg_job);
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ rc = m_parent->cancelLiveSnapshot();
+ m_state = QCAMERA_SM_STATE_RECORDING;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : procEvtPreviewPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ * QCAMERA_SM_STATE_PREVIEW_PIC_TAKING.
+ *
+ * PARAMETERS :
+ * @evt : event to be processed
+ * @payload : event payload. Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt,
+ void *payload)
+{
+ int32_t rc = NO_ERROR;
+ qcamera_api_result_t result;
+ memset(&result, 0, sizeof(qcamera_api_result_t));
+
+ LOGL("event (%d)", evt);
+ switch (evt) {
+ case QCAMERA_SM_EVT_SET_CALLBACKS:
+ {
+ qcamera_sm_evt_setcb_payload_t *setcbs =
+ (qcamera_sm_evt_setcb_payload_t *)payload;
+ rc = m_parent->setCallBacks(setcbs->notify_cb,
+ setcbs->data_cb,
+ setcbs->data_cb_timestamp,
+ setcbs->get_memory,
+ setcbs->user);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+ {
+ rc = m_parent->enableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+ {
+ rc = m_parent->disableMsgType(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+ {
+ int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = enabled;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS:
+ {
+ bool needRestart = false;
+ rc = m_parent->updateParameters((char*)payload, needRestart);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+ {
+            // need to restart preview for the new parameters to take effect
+ LOGD("Stopping preview...");
+ // stop preview
+ rc = m_parent->stopPreview();
+ // Clear memory pools
+ m_parent->m_memoryPool.clear();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+ {
+ // commit parameter changes to server
+ rc = m_parent->commitParameterChanges();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+ {
+ // start preview again
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ applyDelayedMsgs();
+ rc = m_parent->startPreview();
+ if (rc != NO_ERROR) {
+ m_parent->unpreparePreview();
+ }
+ }
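+            // If preview could not be restarted, fall back to PIC_TAKING so
+            // the in-flight capture continues without a live preview.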
+ if (rc != NO_ERROR) {
+ m_state = QCAMERA_SM_STATE_PIC_TAKING;
+ }
+ m_parent->setNeedRestart(false);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_GET_PARAMS:
+ {
+ result.params = m_parent->getParameters();
+ rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PUT_PARAMS:
+ {
+ rc = m_parent->putParameters((char*)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 1;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RECORDING_ENABLED:
+ {
+ rc = NO_ERROR;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+ result.enabled = 0;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+ {
+ rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_DUMP:
+ {
+ rc = m_parent->dump(*((int *)payload));
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+ {
+ rc = m_parent->autoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+ {
+ rc = m_parent->cancelAutoFocus();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_SEND_COMMAND:
+ {
+ qcamera_sm_evt_command_payload_t *cmd_payload =
+ (qcamera_sm_evt_command_payload_t *)payload;
+ rc = m_parent->sendCommand(cmd_payload->cmd,
+ cmd_payload->arg1,
+ cmd_payload->arg2);
+#ifndef VANILLA_HAL
+ if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+ // move state to previewing state
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ }
+#endif
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+ {
+ rc = m_parent->releaseRecordingFrame((const void *)payload);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_CANCEL_PICTURE:
+ {
+ if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+ rc = m_parent->cancelPicture();
+ } else {
+ rc = m_parent->cancelLiveSnapshot();
+ }
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_PREVIEW:
+ {
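+            // How the in-flight capture is torn down depends on its type:
+            // ZSL and longshot captures go through cancelPicture(), a regular
+            // live snapshot through cancelLiveSnapshot().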
+ if (m_parent->isZSLMode()) {
+ // cancel picture first
+ rc = m_parent->cancelPicture();
+ m_parent->stopChannel(QCAMERA_CH_TYPE_ZSL);
+ } else if (m_parent->isLongshotEnabled()) {
+ // just cancel picture
+ rc = m_parent->cancelPicture();
+ } else {
+ rc = m_parent->cancelLiveSnapshot();
+ m_parent->stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+ }
+ // unprepare preview
+ m_parent->unpreparePreview();
+ m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_START_RECORDING:
+ {
+ if (m_parent->isZSLMode()) {
+ LOGE("Error!! cannot handle evt(%d) in state(%d) in ZSL mode", evt, m_state);
+ rc = INVALID_OPERATION;
+ } else if (m_parent->isLongshotEnabled()) {
+ LOGE("Error!! cannot handle evt(%d) in state(%d) in Longshot mode", evt, m_state);
+ rc = INVALID_OPERATION;
+ } else {
+ rc = m_parent->preStartRecording();
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_START_RECORDING:
+ {
+ if (m_parent->isZSLMode()) {
+ LOGE("Error!! cannot handle evt(%d) in state(%d) in ZSL mode",
+ evt, m_state);
+ rc = INVALID_OPERATION;
+ } else if (m_parent->isLongshotEnabled()) {
+ LOGE("Error!! cannot handle evt(%d) in state(%d) in Longshot mode",
+ evt, m_state);
+ rc = INVALID_OPERATION;
+ } else {
+ rc = m_parent->startRecording();
+ if (rc == NO_ERROR) {
+ m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+ }
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+ {
+ int32_t faceID = 0;
+ qcamera_sm_evt_reg_face_payload_t *reg_payload =
+ (qcamera_sm_evt_reg_face_payload_t *)payload;
+ rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+ reg_payload->config,
+ faceID);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+ result.handle = faceID;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+ {
+ if ( m_parent->isLongshotEnabled() ) {
+                // no ops here, need to signal NO_ERROR
+ rc = NO_ERROR;
+ } else {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_TAKE_PICTURE:
+ {
+ if ( m_parent->isLongshotEnabled() ) {
+ rc = m_parent->longShot();
+ } else {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ }
+
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+
+ case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+ {
+ LOGD("Prepare Snapshot");
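+            // Prepare-snapshot is only supported here for retro captures;
+            // any other capture type is rejected as an invalid operation.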
+ if (m_parent->isRetroPicture()) {
+ LOGD("Prepare Snapshot in Retro Mode");
+ rc = m_parent->prepareHardwareForSnapshot(FALSE);
+ if (rc != NO_ERROR) {
+ LOGE("prepareHardwareForSnapshot failed %d",
+ rc);
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ }
+ else {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)",
+ evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_STOP_RECORDING:
+ case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+ case QCAMERA_SM_EVT_START_PREVIEW:
+ case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+ case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+ case QCAMERA_SM_EVT_RELEASE:
+ {
+ LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+ rc = INVALID_OPERATION;
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_INTERNAL:
+ {
+ qcamera_sm_internal_evt_payload_t *internal_evt =
+ (qcamera_sm_internal_evt_payload_t *)payload;
+ switch (internal_evt->evt_type) {
+ case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+ rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+ LOGD("Received QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event");
+ if (m_parent->isRetroPicture()) {
+ m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+ LOGD("Retro picture");
+ result.status = NO_ERROR;
+ result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ else {
+                    LOGE("Invalid case for "
+                            "QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event");
+ }
+ break;
+ case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+ rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT:
+ // This is valid only in Retro picture Mode
+ if (m_parent->isRetroPicture()) {
+ LOGD("Received QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT event");
+ result.status = NO_ERROR;
+ result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ else {
+ LOGD("Wrong Case for QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT event");
+ }
+ break;
+ case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+ rc = m_parent->processHistogramStats(internal_evt->stats_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_CROP_INFO:
+ rc = m_parent->processZoomEvent(internal_evt->crop_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+ rc = m_parent->processASDUpdate(internal_evt->asd_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+ rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+ rc = m_parent->processAEInfo(internal_evt->ae_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+ rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+ break;
+ case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+ rc = m_parent->processHDRData(internal_evt->hdr_data);
+ break;
+ case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+ rc = m_parent->processRetroAECUnlock();
+ break;
+ case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+ rc = m_parent->processZSLCaptureDone();
+ break;
+ default:
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_EVT_NOTIFY:
+ {
+ mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+ switch (cam_evt->server_event_type) {
+ case CAM_EVENT_TYPE_DAEMON_DIED:
+ {
+                    // Unblock callers waiting indefinitely on the
+                    // prepare-snapshot and take-picture results before
+                    // reporting the server death.
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ result.status = rc;
+ result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+
+ m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+ CAMERA_ERROR_SERVER_DIED,
+ 0);
+ }
+ break;
+ case CAM_EVENT_TYPE_CAC_DONE:
+ if (m_parent->isCACEnabled() || m_parent->mParameters.isOEMFeatEnabled()) {
+ LOGD("[LONG_SHOT_DBG] : Received CAC Done");
+ if ((m_parent->isLongshotEnabled())
+ && (!m_parent->isCaptureShutterEnabled())) {
+ // play shutter sound for longshot
+ // after CAC stage is done
+ m_parent->playShutter();
+ }
+ m_parent->mCACDoneReceived = TRUE;
+ }
+ break;
+ default:
+                LOGE("Invalid camera server event %d in state(%d)",
+ cam_evt->server_event_type, m_state);
+ break;
+ }
+ }
+ break;
+ case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+ {
+ LOGL("Calling Process Jpeg Notify");
+ qcamera_jpeg_evt_payload_t *jpeg_job =
+ (qcamera_jpeg_evt_payload_t *)payload;
+ rc = m_parent->processJpegNotify(jpeg_job);
+ }
+ break;
+ case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+ {
+ LOGL("Snapshot Done");
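+            // Capture pipeline finished: cancel the picture (ZSL/longshot)
+            // or the live snapshot, return to PREVIEWING, and additionally
+            // wake any API caller still waiting when this was a retro capture.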
+ if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+ rc = m_parent->cancelPicture();
+ } else {
+ rc = m_parent->cancelLiveSnapshot();
+ }
+ m_state = QCAMERA_SM_STATE_PREVIEWING;
+ if (m_parent->isRetroPicture()){
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ LOGL("\n Signalling for JPEG snapshot done!!");
+ m_parent->signalAPIResult(&result);
+
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalEvtResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+ {
+ rc = m_parent->updateThermalLevel(payload);
+ }
+ break;
+ case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+ {
+ m_parent->stopPreview();
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+ {
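+            // Internal request to bring preview back up: prepare and restart
+            // preview while remaining in PREVIEW_PIC_TAKING.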
+ rc = m_parent->preparePreview();
+ if (rc == NO_ERROR) {
+ rc = m_parent->startPreview();
+ }
+ result.status = rc;
+ result.request_api = evt;
+ result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+ m_parent->signalAPIResult(&result);
+ }
+ break;
+ default:
+ LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : isRecording
+ *
+ * DESCRIPTION: check if recording is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : true -- recording
+ * false -- not in recording mode
+ *==========================================================================*/
+bool QCameraStateMachine::isRecording()
+{
+ switch (m_state) {
+ case QCAMERA_SM_STATE_RECORDING:
+ case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : isPreviewRunning
+ *
+ * DESCRIPTION: check if preview is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : true -- preview running
+ * false -- preview stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewRunning()
+{
+ switch (m_state) {
+ case QCAMERA_SM_STATE_PREVIEWING:
+ case QCAMERA_SM_STATE_RECORDING:
+ case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+ case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+ case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+ case QCAMERA_SM_STATE_PREVIEW_READY:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : isPreviewReady
+ *
+ * DESCRIPTION: check if preview is in ready state.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : true -- preview is in ready state
+ * false -- preview is stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewReady()
+{
+ switch (m_state) {
+ case QCAMERA_SM_STATE_PREVIEW_READY:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : isCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : true -- capture running
+ * false -- capture stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isCaptureRunning()
+{
+ switch (m_state) {
+ case QCAMERA_SM_STATE_PIC_TAKING:
+ case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+ case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+ return true;
+ default:
+ return false;
+ }
+}
+/*===========================================================================
+ * FUNCTION : isNonZSLCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in progress in non-ZSL mode.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- capture running in non-ZSL mode
+ *              false -- either not capturing, or the capture is not in non-ZSL mode
+ *==========================================================================*/
+bool QCameraStateMachine::isNonZSLCaptureRunning()
+{
+ switch (m_state) {
+ case QCAMERA_SM_STATE_PIC_TAKING:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : dump
+ *
+ * DESCRIPTION: Composes a string based on current configuration
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : Formatted string
+ *==========================================================================*/
+String8 QCameraStateMachine::dump()
+{
+ String8 str("\n");
+ char s[128];
+
+ snprintf(s, 128, "Is Preview Running: %d\n", isPreviewRunning());
+ str += s;
+
+ snprintf(s, 128, "Is Capture Running: %d\n", isCaptureRunning());
+ str += s;
+
+ snprintf(s, 128, "Is Non ZSL Capture Running: %d\n",
+ isNonZSLCaptureRunning());
+ str += s;
+
+ snprintf(s, 128, "Current State: %d \n", m_state);
+ str += s;
+
+ switch(m_state){
+ case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_STOPPED \n");
+ break;
+
+ case QCAMERA_SM_STATE_PREVIEW_READY:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_READY \n");
+ break;
+
+ case QCAMERA_SM_STATE_PREVIEWING:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEWING \n");
+ break;
+
+ case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PREPARE_SNAPSHOT \n");
+ break;
+
+ case QCAMERA_SM_STATE_PIC_TAKING:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PIC_TAKING \n");
+ break;
+
+ case QCAMERA_SM_STATE_RECORDING:
+ snprintf(s, 128, " QCAMERA_SM_STATE_RECORDING \n");
+ break;
+
+ case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+ snprintf(s, 128, " QCAMERA_SM_STATE_VIDEO_PIC_TAKING \n");
+ break;
+
+ case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+ snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_PIC_TAKING \n");
+ break;
+ }
+ str += s;
+
+ return str;
+}
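+
+/*===========================================================================
+ * EXAMPLE : dump() output
+ *
+ * Illustrative only (not part of the original HAL source). For a camera that
+ * is previewing but not capturing, dump() would return a string along the
+ * lines of:
+ *
+ *   Is Preview Running: 1
+ *   Is Capture Running: 0
+ *   Is Non ZSL Capture Running: 0
+ *   Current State: 2
+ *    QCAMERA_SM_STATE_PREVIEWING
+ *
+ * The numeric value of "Current State" assumes the enum ordering declared in
+ * QCameraStateMachine.h.
+ *==========================================================================*/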
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraStateMachine.h b/camera/QCamera2/HAL/QCameraStateMachine.h
new file mode 100644
index 0000000..b02ba06
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStateMachine.h
@@ -0,0 +1,263 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STATEMACHINE_H__
+#define __QCAMERA_STATEMACHINE_H__
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "QCameraQueue.h"
+#include "QCameraChannel.h"
+#include "cam_semaphore.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCamera2HardwareInterface;
+
+typedef enum {
+ /*******BEGIN OF: API EVT*********/
+ QCAMERA_SM_EVT_SET_PREVIEW_WINDOW = 1, // set preview window
+ QCAMERA_SM_EVT_SET_CALLBACKS, // set callbacks
+ QCAMERA_SM_EVT_ENABLE_MSG_TYPE, // enable msg type
+ QCAMERA_SM_EVT_DISABLE_MSG_TYPE, // disable msg type
+ QCAMERA_SM_EVT_MSG_TYPE_ENABLED, // query certain msg type is enabled
+
+ QCAMERA_SM_EVT_SET_PARAMS, // set parameters
+ QCAMERA_SM_EVT_SET_PARAMS_STOP, // stop camera after set params, if necessary
+ QCAMERA_SM_EVT_SET_PARAMS_COMMIT, // commit set params
+ QCAMERA_SM_EVT_SET_PARAMS_RESTART, // restart after set params, if necessary
+ QCAMERA_SM_EVT_GET_PARAMS, // get parameters
+ QCAMERA_SM_EVT_PUT_PARAMS, // put parameters, release param buf
+
+ QCAMERA_SM_EVT_PREPARE_PREVIEW, // prepare preview (zsl, camera mode, camcorder mode)
+ QCAMERA_SM_EVT_START_PREVIEW, // start preview (zsl, camera mode, camcorder mode)
+ QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW, // start no display preview (zsl, camera mode, camcorder mode)
+ QCAMERA_SM_EVT_STOP_PREVIEW, // stop preview (zsl, camera mode, camcorder mode)
+ QCAMERA_SM_EVT_PREVIEW_ENABLED, // query if preview is running
+
+ QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, // request to store meta data in video buffers
+ QCAMERA_SM_EVT_PRE_START_RECORDING, // pre start recording, to prepare for recording
+ QCAMERA_SM_EVT_START_RECORDING, // start recording
+ QCAMERA_SM_EVT_STOP_RECORDING, // stop recording
+ QCAMERA_SM_EVT_RECORDING_ENABLED, // query if recording is running
+ QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, // release recording frame
+
+ QCAMERA_SM_EVT_PREPARE_SNAPSHOT, // prepare snapshot in case LED needs to be flashed
+    QCAMERA_SM_EVT_PRE_TAKE_PICTURE,       // pre take picture (to restart preview if necessary)
+    QCAMERA_SM_EVT_TAKE_PICTURE,           // take picture (zsl, regular capture, live snapshot)
+ QCAMERA_SM_EVT_CANCEL_PICTURE, // cancel picture
+
+ QCAMERA_SM_EVT_START_AUTO_FOCUS, // start auto focus
+ QCAMERA_SM_EVT_STOP_AUTO_FOCUS, // stop auto focus
+ QCAMERA_SM_EVT_SEND_COMMAND, // send command
+
+ QCAMERA_SM_EVT_RELEASE, // release camera resource
+ QCAMERA_SM_EVT_DUMP, // dump
+ QCAMERA_SM_EVT_REG_FACE_IMAGE, // register a face image in imaging lib
+ /*******END OF: API EVT*********/
+
+ QCAMERA_SM_EVT_EVT_INTERNAL, // internal evt notify
+ QCAMERA_SM_EVT_EVT_NOTIFY, // evt notify from server
+ QCAMERA_SM_EVT_JPEG_EVT_NOTIFY, // evt notify from jpeg
+ QCAMERA_SM_EVT_SNAPSHOT_DONE, // internal evt that snapshot is done
+ QCAMERA_SM_EVT_THERMAL_NOTIFY, // evt notify from thermal daemon
+ QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL, // stop capture channel
+ QCAMERA_SM_EVT_RESTART_PERVIEW, // internal preview restart
+ QCAMERA_SM_EVT_DELAYED_RESTART, // preview restart needs delay (dual camera mode)
+ QCAMERA_SM_EVT_SEND_COMMAND_RESTART, // restart after send command (if necessary)
+ QCAMERA_SM_EVT_RESTART_START_PREVIEW, // preview start as part of restart (dual camera mode)
+ QCAMERA_SM_EVT_RESTART_STOP_PREVIEW, // preview stop as part of restart (dual camera mode)
+ QCAMERA_SM_EVT_MAX
+} qcamera_sm_evt_enum_t;
+
+typedef enum {
+ QCAMERA_API_RESULT_TYPE_DEF, // default type, no additional info
+ QCAMERA_API_RESULT_TYPE_ENABLE_FLAG, // msg_enabled, preview_enabled, recording_enabled
+ QCAMERA_API_RESULT_TYPE_PARAMS, // returned parameters in string
+ QCAMERA_API_RESULT_TYPE_HANDLE, // returned handle in int
+ QCAMERA_API_RESULT_TYPE_MAX
+} qcamera_api_result_type_t;
+
+typedef struct {
+ int32_t status; // api call status
+ qcamera_sm_evt_enum_t request_api; // api evt requested
+ qcamera_api_result_type_t result_type; // result type
+ union {
+ int enabled; // result_type == QCAMERA_API_RESULT_TYPE_ENABLE_FLAG
+ char *params; // result_type == QCAMERA_API_RESULT_TYPE_PARAMS
+ int handle; // result_type ==QCAMERA_API_RESULT_TYPE_HANDLE
+ };
+} qcamera_api_result_t;
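+
+/* Illustrative only (assumed caller-side handling, not part of this header):
+ * which union member is valid depends on result_type, e.g.
+ *
+ *   qcamera_api_result_t result;   // filled in by the state machine thread
+ *   if (result.result_type == QCAMERA_API_RESULT_TYPE_ENABLE_FLAG) {
+ *       bool previewEnabled = (result.enabled != 0);
+ *   } else if (result.result_type == QCAMERA_API_RESULT_TYPE_PARAMS) {
+ *       const char *params = result.params;
+ *   }
+ */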
+
+typedef struct api_result_list {
+ qcamera_api_result_t result;
+ struct api_result_list *next;
+} api_result_list;
+
+// definition for payload type of setting callback
+typedef struct {
+ camera_notify_callback notify_cb;
+ camera_data_callback data_cb;
+ camera_data_timestamp_callback data_cb_timestamp;
+ camera_request_memory get_memory;
+ void *user;
+} qcamera_sm_evt_setcb_payload_t;
+
+// definition for payload type of sending command
+typedef struct {
+ int32_t cmd;
+ int32_t arg1;
+ int32_t arg2;
+} qcamera_sm_evt_command_payload_t;
+
+// definition for payload type of sending command
+typedef struct {
+ void *img_ptr;
+ cam_pp_offline_src_config_t *config;
+} qcamera_sm_evt_reg_face_payload_t;
+
+typedef enum {
+ QCAMERA_INTERNAL_EVT_FOCUS_UPDATE, // focus updating result
+ QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE, // prepare snapshot done
+ QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT, // face detection result
+ QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS, // histogram
+ QCAMERA_INTERNAL_EVT_CROP_INFO, // crop info
+ QCAMERA_INTERNAL_EVT_ASD_UPDATE, // asd update result
+ QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT, // Ready for Prepare Snapshot
+ QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE, // Led mode override
+ QCAMERA_INTERNAL_EVT_AWB_UPDATE, // awb update result
+ QCAMERA_INTERNAL_EVT_AE_UPDATE, // ae update result
+ QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE, // focus position update result
+ QCAMERA_INTERNAL_EVT_HDR_UPDATE, // HDR scene update
+ QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK, // retro burst AEC unlock event
+ QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE, // ZSL capture done event
+ QCAMERA_INTERNAL_EVT_MAX
+} qcamera_internal_evt_type_t;
+
+typedef struct {
+ qcamera_internal_evt_type_t evt_type;
+ union {
+ cam_auto_focus_data_t focus_data;
+ cam_prep_snapshot_state_t prep_snapshot_state;
+ cam_faces_data_t faces_data;
+ cam_hist_stats_t stats_data;
+ cam_crop_data_t crop_data;
+ cam_asd_decision_t asd_data;
+ cam_flash_mode_t led_data;
+ cam_awb_params_t awb_data;
+ cam_3a_params_t ae_data;
+ cam_focus_pos_info_t focus_pos;
+ cam_asd_hdr_scene_data_t hdr_data;
+ };
+} qcamera_sm_internal_evt_payload_t;
+
+class QCameraStateMachine
+{
+public:
+ QCameraStateMachine(QCamera2HardwareInterface *ctrl);
+ virtual ~QCameraStateMachine();
+ int32_t procAPI(qcamera_sm_evt_enum_t evt, void *api_payload);
+ int32_t procEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+
+ bool isPreviewRunning(); // check if preview is running
+ bool isPreviewReady(); // check if preview is ready
+ bool isCaptureRunning(); // check if image capture is running
+ bool isNonZSLCaptureRunning(); // check if image capture is running in non ZSL mode
+ String8 dump(); //returns the state information in a string
+ bool isPrepSnapStateRunning();
+ bool isRecording();
+ void releaseThread();
+
+ bool isPreviewCallbackNeeded() { return m_bPreviewCallbackNeeded; };
+ int32_t setPreviewCallbackNeeded(bool enabled) {m_bPreviewCallbackNeeded=enabled; return 0;};
+private:
+ typedef enum {
+ QCAMERA_SM_STATE_PREVIEW_STOPPED, // preview is stopped
+ QCAMERA_SM_STATE_PREVIEW_READY, // preview started but preview window is not set yet
+ QCAMERA_SM_STATE_PREVIEWING, // previewing
+ QCAMERA_SM_STATE_PREPARE_SNAPSHOT, // prepare snapshot in case aec estimation is
+ // needed for LED flash
+ QCAMERA_SM_STATE_PIC_TAKING, // taking picture (preview stopped)
+ QCAMERA_SM_STATE_RECORDING, // recording (preview running)
+ QCAMERA_SM_STATE_VIDEO_PIC_TAKING, // taking live snapshot during recording (preview running)
+ QCAMERA_SM_STATE_PREVIEW_PIC_TAKING // taking ZSL/live snapshot (recording stopped but preview running)
+ } qcamera_state_enum_t;
+
+ typedef enum
+ {
+ QCAMERA_SM_CMD_TYPE_API, // cmd from API
+ QCAMERA_SM_CMD_TYPE_EVT, // cmd from mm-camera-interface/mm-jpeg-interface event
+ QCAMERA_SM_CMD_TYPE_EXIT, // cmd for exiting statemachine cmdThread
+ QCAMERA_SM_CMD_TYPE_MAX
+ } qcamera_sm_cmd_type_t;
+
+ typedef struct {
+ qcamera_sm_cmd_type_t cmd; // cmd type (where it comes from)
+ qcamera_sm_evt_enum_t evt; // event type
+ void *evt_payload; // ptr to payload
+ } qcamera_sm_cmd_t;
+
+ int32_t stateMachine(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPreviewingState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtRecordingState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+ int32_t procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+
+ // main statemachine process routine
+ static void *smEvtProcRoutine(void *data);
+
+ int32_t applyDelayedMsgs();
+
+ QCamera2HardwareInterface *m_parent; // ptr to HWI
+ qcamera_state_enum_t m_state; // statemachine state
+ QCameraQueue api_queue; // cmd queue for APIs
+ QCameraQueue evt_queue; // cmd queue for evt from mm-camera-intf/mm-jpeg-intf
+ pthread_t cmd_pid; // cmd thread ID
+ cam_semaphore_t cmd_sem; // semaphore for cmd thread
+ bool m_bDelayPreviewMsgs; // Delay preview callback enable during ZSL snapshot
+ bool m_bPreviewNeedsRestart; // Preview needs restart
+ bool m_bPreviewDelayedRestart; // Preview delayed restart
+ int32_t m_DelayedMsgs;
+ bool m_RestoreZSL;
+ bool m_bPreviewCallbackNeeded;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STATEMACHINE_H__ */
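+
+/* Minimal usage sketch (illustrative only, not part of the original HAL
+ * source; the payload values and call sites are assumptions). The owning
+ * QCamera2HardwareInterface is expected to drive the state machine roughly
+ * as follows:
+ *
+ *   qcamera::QCameraStateMachine sm(hwi);            // hwi: QCamera2HardwareInterface*
+ *   sm.procAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);  // queue an API event
+ *   if (sm.isPreviewRunning()) {                     // query derived state
+ *       android::String8 info = sm.dump();           // human-readable summary
+ *   }
+ *   sm.releaseThread();                              // tear down the cmd thread
+ */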
diff --git a/camera/QCamera2/HAL/QCameraStream.cpp b/camera/QCamera2/HAL/QCameraStream.cpp
new file mode 100644
index 0000000..cbe8a6a
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStream.cpp
@@ -0,0 +1,2656 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStream"
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCameraBufferMaps.h"
+#include "QCamera2HWI.h"
+#include "QCameraStream.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define CAMERA_MIN_ALLOCATED_BUFFERS 3
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION : get_bufs
+ *
+ * DESCRIPTION: static function entry to allocate stream buffers
+ *
+ * PARAMETERS :
+ * @offset : offset info of stream buffers
+ * @num_bufs : number of buffers allocated
+ * @initial_reg_flag: flag to indicate if buffer needs to be registered
+ * at kernel initially
+ * @bufs : output of allocated buffers
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs(
+ cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+ if (!stream) {
+ LOGE("getBufs invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ if (stream->mStreamInfo != NULL
+ && stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        //Batch Mode. Allocate batch buffers
+ return stream->allocateBatchBufs(offset, num_bufs,
+ initial_reg_flag, bufs, ops_tbl);
+ } else {
+ // Plane Buffer. Allocate plane buffer
+ return stream->getBufs(offset, num_bufs,
+ initial_reg_flag, bufs, ops_tbl);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : get_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to allocate deferred stream buffers
+ *
+ * PARAMETERS :
+ * @offset : offset info of stream buffers
+ * @num_bufs : number of buffers allocated
+ * @initial_reg_flag: flag to indicate if buffer needs to be registered
+ * at kernel initially
+ * @bufs : output of allocated buffers
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs_deffered(
+ cam_frame_len_offset_t * /* offset */,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t * ops_tbl,
+ void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+
+ if (!stream) {
+ LOGE("getBufs invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ return stream->getBufsDeferred(NULL /*offset*/, num_bufs, initial_reg_flag, bufs,
+ ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION : put_bufs
+ *
+ * DESCRIPTION: static function entry to deallocate stream buffers
+ *
+ * PARAMETERS :
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs(
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+ if (!stream) {
+ LOGE("putBufs invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ if (stream->mStreamInfo != NULL
+ && stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        //Batch Mode. Release batch buffers
+ return stream->releaseBatchBufs(ops_tbl);
+ } else {
+ // Plane Buffer. release plane buffer
+ return stream->putBufs(ops_tbl);
+ }
+
+}
+
+/*===========================================================================
+ * FUNCTION : put_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to deallocate deferred stream buffers
+ *
+ * PARAMETERS :
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs_deffered(
+ mm_camera_map_unmap_ops_tbl_t * /*ops_tbl */,
+ void * user_data )
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+
+ if (!stream) {
+ LOGE("put_bufs_deffered invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ return stream->putBufsDeffered();
+}
+
+/*===========================================================================
+ * FUNCTION : invalidate_buf
+ *
+ * DESCRIPTION: static function entry to invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ * @index : index of the stream buffer to invalidate
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidate_buf(uint32_t index, void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+ if (!stream) {
+ LOGE("invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ if (stream->mStreamInfo->is_secure == SECURE){
+ return 0;
+ }
+
+ if (stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ for (int i = 0; i < stream->mBufDefs[index].user_buf.bufs_used; i++) {
+ uint32_t buf_idx = stream->mBufDefs[index].user_buf.buf_idx[i];
+ stream->invalidateBuf(buf_idx);
+ }
+ } else {
+ return stream->invalidateBuf(index);
+ }
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ * @index : index of the stream buffer to clean invalidate
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::clean_invalidate_buf(uint32_t index, void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+ if (!stream) {
+ LOGE("invalid stream pointer");
+ return NO_MEMORY;
+ }
+
+ if (stream->mStreamInfo->is_secure == SECURE){
+ return 0;
+ }
+
+ if (stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ for (int i = 0; i < stream->mBufDefs[index].user_buf.bufs_used; i++) {
+ uint32_t buf_idx = stream->mBufDefs[index].user_buf.buf_idx[i];
+ stream->cleanInvalidateBuf(buf_idx);
+ }
+ } else {
+ return stream->cleanInvalidateBuf(index);
+ }
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : set_config_ops
+ *
+ * DESCRIPTION: static function update mm-interface ops functions
+ *
+ * PARAMETERS :
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ * @user_data : user data ptr of ops_tbl
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::set_config_ops(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data)
+{
+ QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+ if (!stream) {
+ LOGE("Stream invalid");
+ return NO_MEMORY;
+ }
+
+ stream->m_MemOpsTbl = *ops_tbl;
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : QCameraStream
+ *
+ * DESCRIPTION: constructor of QCameraStream
+ *
+ * PARAMETERS :
+ * @allocator : memory allocator obj
+ * @camHandle : camera handle
+ * @chId : channel handle
+ * @camOps : ptr to camera ops table
+ * @paddingInfo: ptr to padding info
+ * @deffered : deferred stream
+ * @online_rotation: rotation applied online
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraStream::QCameraStream(QCameraAllocator &allocator,
+ uint32_t camHandle, uint32_t chId,
+ mm_camera_ops_t *camOps, cam_padding_info_t *paddingInfo,
+ bool deffered, cam_rotation_t online_rotation):
+ mDumpFrame(0),
+ mDumpMetaFrame(0),
+ mDumpSkipCnt(0),
+ mStreamTimestamp(0),
+ mCamHandle(camHandle),
+ mChannelHandle(chId),
+ mHandle(0),
+ mCamOps(camOps),
+ mStreamInfo(NULL),
+ mNumBufs(0),
+ mNumPlaneBufs(0),
+ mNumBufsNeedAlloc(0),
+ mRegFlags(NULL),
+ mDataCB(NULL),
+ mSYNCDataCB(NULL),
+ mUserData(NULL),
+ mDataQ(releaseFrameData, this),
+ mStreamInfoBuf(NULL),
+ mMiscBuf(NULL),
+ mStreamBufs(NULL),
+ mStreamBatchBufs(NULL),
+ mAllocator(allocator),
+ mBufDefs(NULL),
+ mPlaneBufDefs(NULL),
+ mOnlineRotation(online_rotation),
+ mStreamBufsAcquired(false),
+ m_bActive(false),
+ mDynBufAlloc(false),
+ mBufAllocPid(0),
+ mDefferedAllocation(deffered),
+ wait_for_cond(false),
+ mAllocTaskId(0),
+ mMapTaskId(0),
+ mSyncCBEnabled(false)
+{
+ mMemVtbl.user_data = this;
+ if ( !deffered ) {
+ mMemVtbl.get_bufs = get_bufs;
+ mMemVtbl.put_bufs = put_bufs;
+ } else {
+ mMemVtbl.get_bufs = get_bufs_deffered;
+ mMemVtbl.put_bufs = put_bufs_deffered;
+ }
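+    /* For deferred streams the get/put hooks only publish buffers that were
+     * already allocated by the background allocation task (see
+     * getBufsDeferred()), while the non-deferred hooks allocate and map the
+     * buffers on demand when mm-camera-interface invokes them (see getBufs()). */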
+ mMemVtbl.invalidate_buf = invalidate_buf;
+ mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
+ mMemVtbl.set_config_ops = set_config_ops;
+ memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+ memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+ memset(&mCropInfo, 0, sizeof(cam_rect_t));
+ memset(&m_MemOpsTbl, 0, sizeof(mm_camera_map_unmap_ops_tbl_t));
+ memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+ memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+ memset(&mAllocTask, 0, sizeof(mAllocTask));
+ memset(&mMapTask, 0, sizeof(mMapTask));
+ pthread_mutex_init(&mCropLock, NULL);
+ pthread_mutex_init(&mParameterLock, NULL);
+ mCurMetaMemory = NULL;
+ mCurBufIndex = -1;
+ mCurMetaIndex = -1;
+ mFirstTimeStamp = 0;
+ memset (&mStreamMetaMemory, 0,
+ (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
+ pthread_mutex_init(&m_lock, NULL);
+ pthread_cond_init(&m_cond, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION : ~QCameraStream
+ *
+ * DESCRIPTION: destructor of QCameraStream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+QCameraStream::~QCameraStream()
+{
+ pthread_mutex_destroy(&mCropLock);
+ pthread_mutex_destroy(&mParameterLock);
+
+ mAllocator.waitForBackgroundTask(mAllocTaskId);
+ mAllocator.waitForBackgroundTask(mMapTaskId);
+ if (mBufAllocPid != 0) {
+ cond_signal(true);
+ LOGL("Wait for buf allocation thread dead");
+ // Wait for the allocation of additional stream buffers
+ pthread_join(mBufAllocPid, NULL);
+ mBufAllocPid = 0;
+ }
+
+ if (mDefferedAllocation) {
+ mStreamBufsAcquired = false;
+ releaseBuffs();
+ }
+
+ unmapStreamInfoBuf();
+ releaseStreamInfoBuf();
+
+ if (mMiscBuf) {
+ unMapBuf(mMiscBuf, CAM_MAPPING_BUF_TYPE_MISC_BUF, NULL);
+ releaseMiscBuf();
+ }
+
+ // delete stream
+ if (mHandle > 0) {
+ mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+ mHandle = 0;
+ }
+ pthread_mutex_destroy(&m_lock);
+ pthread_cond_destroy(&m_cond);
+}
+
+/*===========================================================================
+ * FUNCTION : unmapStreamInfoBuf
+ *
+ * DESCRIPTION: Unmap stream info buffer
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapStreamInfoBuf()
+{
+ int rc = NO_ERROR;
+
+ if (mStreamInfoBuf != NULL) {
+ rc = mCamOps->unmap_stream_buf(mCamHandle,
+ mChannelHandle,
+ mHandle,
+ CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+ 0,
+ -1);
+
+ if (rc < 0) {
+ LOGE("Failed to unmap stream info buffer");
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseMiscBuf
+ *
+ * DESCRIPTION: Release misc buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseMiscBuf()
+{
+ int rc = NO_ERROR;
+
+ if (mMiscBuf != NULL) {
+ mMiscBuf->deallocate();
+ delete mMiscBuf;
+ mMiscBuf = NULL;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseStreamInfoBuf
+ *
+ * DESCRIPTION: Release stream info buffer
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseStreamInfoBuf()
+{
+ int rc = NO_ERROR;
+
+ if (mStreamInfoBuf != NULL) {
+ mStreamInfoBuf->deallocate();
+ delete mStreamInfoBuf;
+ mStreamInfoBuf = NULL;
+ mStreamInfo = NULL;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : deleteStream
+ *
+ * DESCRIPTION: Deletes a camera stream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraStream::deleteStream()
+{
+ if (mHandle > 0) {
+ acquireStreamBufs();
+ releaseBuffs();
+ unmapStreamInfoBuf();
+ mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : unMapBuf
+ *
+ * DESCRIPTION: unmaps buffers
+ *
+ * PARAMETERS :
+ * @heapBuf : heap buffer handler
+ * @bufType : buffer type
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unMapBuf(QCameraMemory *Buf,
+ cam_mapping_buf_type bufType, __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int32_t rc = NO_ERROR;
+ uint8_t cnt;
+ ssize_t bufSize = BAD_INDEX;
+ uint32_t i;
+
+ cnt = Buf->getCnt();
+ for (i = 0; i < cnt; i++) {
+ bufSize = Buf->getSize(i);
+ if (BAD_INDEX != bufSize) {
+ if (m_MemOpsTbl.unmap_ops == NULL ) {
+ rc = mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle, mHandle,
+ bufType, i, -1);
+ } else {
+ rc = m_MemOpsTbl.unmap_ops(i, -1, bufType, m_MemOpsTbl.userdata);
+ }
+ if (rc < 0) {
+ LOGE("Failed to unmap buffer");
+ break;
+ }
+ } else {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ rc = BAD_INDEX;
+ break;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mapBufs
+ *
+ * DESCRIPTION: maps buffers
+ *
+ * PARAMETERS :
+ * @heapBuf : heap buffer handler
+ * @bufType : buffer type
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBufs(QCameraMemory *Buf,
+ cam_mapping_buf_type bufType, __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int32_t rc = NO_ERROR;
+ uint32_t i = 0;
+
+ QCameraBufferMaps bufferMaps;
+ for (i = 0; i < Buf->getCnt(); i++) {
+ ssize_t bufSize = Buf->getSize(i);
+ if (BAD_INDEX == bufSize) {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ return BAD_INDEX;
+ }
+
+ rc = bufferMaps.enqueue(bufType, mHandle, i /*buf index*/, -1 /*plane index*/,
+ 0 /*cookie*/, Buf->getFd(i), bufSize);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ return BAD_INDEX;
+ }
+ }
+
+ cam_buf_map_type_list bufMapList;
+ rc = bufferMaps.getCamBufMapList(bufMapList);
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ return BAD_INDEX;
+ }
+
+ if (m_MemOpsTbl.bundled_map_ops == NULL) {
+ rc = mCamOps->map_stream_bufs(mCamHandle, mChannelHandle, &bufMapList);
+ } else {
+ rc = m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+ }
+
+ if (rc < 0) {
+ LOGE("Failed to map buffer");
+ rc = BAD_INDEX;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : backgroundAllocate
+ *
+ * DESCRIPTION: schedule buffers to be allocated in the background
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::backgroundAllocate(void *data) {
+ QCameraStream *stream = (QCameraStream*)data;
+ int32_t rc = stream->allocateBuffers();
+ if (rc != NO_ERROR) {
+ LOGE("Error allocating buffers !!!");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : backgroundMap
+ *
+ * DESCRIPTION: map buffers in the background
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::backgroundMap(void *data) {
+ QCameraStream *stream = (QCameraStream*)data;
+ int32_t rc = stream->mapBuffers();
+ if (rc != NO_ERROR) {
+ LOGE("Error mapping buffers !!!");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : init
+ *
+ * DESCRIPTION: initialize stream obj
+ *
+ * PARAMETERS :
+ * @streamInfoBuf: ptr to buf that contains stream info
+ * @miscBuf : ptr to buf that contains misc bufs
+ * @stream_cb : stream data notify callback. Can be NULL if not needed
+ * @userdata : user data ptr
+ * @bDynallocBuf : flag to indicate if buffer allocation can be in 2 steps
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::init(QCameraHeapMemory *streamInfoBuf,
+ QCameraHeapMemory *miscBuf,
+ uint8_t minNumBuffers,
+ stream_cb_routine stream_cb,
+ void *userdata,
+ bool bDynallocBuf)
+{
+ int32_t rc = OK;
+
+ // assign and map stream info memory
+ mStreamInfoBuf = streamInfoBuf;
+ mStreamInfo = reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+ mNumBufs = minNumBuffers;
+ mDynBufAlloc = bDynallocBuf;
+
+    // Calculate buffer size for deferred allocation
+ if (mDefferedAllocation) {
+ rc = calcOffset(mStreamInfo);
+ if (rc < 0) {
+ LOGE("Failed to calculate stream offset");
+ goto done;
+ }
+
+ mAllocTask.bgFunction = backgroundAllocate;
+ mAllocTask.bgArgs = this;
+ mAllocTaskId = mAllocator.scheduleBackgroundTask(&mAllocTask);
+ if (mAllocTaskId == 0) {
+            LOGE("Failed to schedule buffer allocation");
+ rc = -ENOMEM;
+ goto done;
+ }
+ }
+
+ mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+ if (!mHandle) {
+ LOGE("add_stream failed");
+ rc = UNKNOWN_ERROR;
+ goto done;
+ }
+
+ rc = mapBufs(mStreamInfoBuf, CAM_MAPPING_BUF_TYPE_STREAM_INFO, NULL);
+ if (rc < 0) {
+ LOGE("Failed to map stream info buffer");
+ goto err1;
+ }
+
+ mMiscBuf = miscBuf;
+ if (miscBuf) {
+ rc = mapBufs(mMiscBuf, CAM_MAPPING_BUF_TYPE_MISC_BUF, NULL);
+ if (rc < 0) {
+ LOGE("Failed to map miscellaneous buffer");
+ releaseMiscBuf();
+ goto err1;
+ }
+ }
+
+ rc = configStream();
+ if (rc < 0) {
+ LOGE("Failed to config stream ");
+ goto err1;
+ }
+
+ if (mDefferedAllocation) {
+ mMapTask.bgFunction = backgroundMap;
+ mMapTask.bgArgs = this;
+ mMapTaskId = mAllocator.scheduleBackgroundTask(&mMapTask);
+ if (mMapTaskId == 0) {
+            LOGE("Failed to schedule buffer allocation");
+ rc = -ENOMEM;
+ goto err1;
+ }
+ }
+
+ mDataCB = stream_cb;
+ mUserData = userdata;
+ return 0;
+
+err1:
+ mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+ mHandle = 0;
+ mNumBufs = 0;
+done:
+ return rc;
+}
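+
+/*===========================================================================
+ * EXAMPLE : typical stream lifecycle
+ *
+ * Illustrative only; the exact call sites (e.g. QCameraChannel) and argument
+ * values are assumptions, not part of this file:
+ *
+ *   QCameraStream *stream = new QCameraStream(allocator, camHandle, chId,
+ *                                             camOps, &padding, deferred, rotation);
+ *   stream->init(streamInfoBuf, miscBuf, minNumBuffers, cb, userdata, dynAlloc);
+ *   stream->start();              // spawns the CAM_strmDatProc thread
+ *   ...                           // frames delivered through dataNotifyCB()
+ *   stream->stop();
+ *   delete stream;                // unmaps/releases buffers, deletes the stream
+ *==========================================================================*/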
+
+/*===========================================================================
+ * FUNCTION : calcOffset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ * @streamInfo : stream information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t QCameraStream::calcOffset(cam_stream_info_t *streamInfo)
+{
+ int32_t rc = 0;
+
+ cam_dimension_t dim = streamInfo->dim;
+ if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION &&
+ streamInfo->stream_type != CAM_STREAM_TYPE_VIDEO) {
+ if (streamInfo->pp_config.rotation == ROTATE_90 ||
+ streamInfo->pp_config.rotation == ROTATE_270) {
+ // rotated by 90 or 270, need to switch width and height
+ dim.width = streamInfo->dim.height;
+ dim.height = streamInfo->dim.width;
+ }
+ }
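+    /* Illustrative example: a 1920x1080 snapshot stream configured with
+     * ROTATE_90 is treated as 1080x1920 from this point on, so the plane
+     * offsets computed in the switch below match the rotated output. */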
+
+ switch (streamInfo->stream_type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ case CAM_STREAM_TYPE_CALLBACK:
+ rc = mm_stream_calc_offset_preview(streamInfo,
+ &dim,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ rc = mm_stream_calc_offset_post_view(streamInfo->fmt,
+ &dim,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ rc = mm_stream_calc_offset_snapshot(streamInfo->fmt,
+ &dim,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ rc = mm_stream_calc_offset_postproc(streamInfo,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ rc = mm_stream_calc_offset_video(streamInfo->fmt,
+ &dim, &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ rc = mm_stream_calc_offset_raw(streamInfo->fmt,
+ &dim,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ rc = mm_stream_calc_offset_analysis(streamInfo->fmt,
+ &dim,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ rc = mm_stream_calc_offset_metadata(&dim,
+ &mPaddingInfo,
+ &streamInfo->buf_planes);
+ break;
+ default:
+ LOGE("not supported for stream type %d",
+ streamInfo->stream_type);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : start
+ *
+ * DESCRIPTION: start stream. Will start main stream thread to handle stream
+ * related ops.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::start()
+{
+ int32_t rc = 0;
+ mDataQ.init();
+ rc = mProcTh.launch(dataProcRoutine, this);
+ if (rc == NO_ERROR) {
+ m_bActive = true;
+ }
+
+ mCurMetaMemory = NULL;
+ mCurBufIndex = -1;
+ mCurMetaIndex = -1;
+ mFirstTimeStamp = 0;
+ memset (&mStreamMetaMemory, 0,
+ (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : stop
+ *
+ * DESCRIPTION: stop stream. Will stop main stream thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::stop()
+{
+ int32_t rc = 0;
+ m_bActive = false;
+ mAllocator.waitForBackgroundTask(mAllocTaskId);
+ mAllocator.waitForBackgroundTask(mMapTaskId);
+ rc = mProcTh.exit();
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : syncRuntimeParams
+ *
+ * DESCRIPTION: query and sync runtime parameters like output crop
+ * buffer info etc.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::syncRuntimeParams()
+{
+ int32_t ret = NO_ERROR;
+
+ memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+ m_OutputCrop.type = CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP;
+
+ ret = getParameter(m_OutputCrop);
+ if (ret != NO_ERROR) {
+ LOGE("stream getParameter for output crop failed");
+ return ret;
+ }
+
+ memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+ m_ImgProp.type = CAM_STREAM_PARAM_TYPE_GET_IMG_PROP;
+
+ ret = getParameter(m_ImgProp);
+ if (ret != NO_ERROR) {
+ LOGE("stream getParameter for image prop failed");
+ return ret;
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : preview window ops table to set preview crop window
+ * @crop_info : crop info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processZoomDone(preview_stream_ops_t *previewWindow,
+ cam_crop_data_t &crop_info)
+{
+ int32_t rc = 0;
+
+ if (!m_bActive) {
+ LOGL("Stream not active");
+ return NO_ERROR;
+ }
+
+ // get stream param for crop info
+ for (int i = 0; i < crop_info.num_of_streams; i++) {
+ if (crop_info.crop_info[i].stream_id == mStreamInfo->stream_svr_id) {
+ pthread_mutex_lock(&mCropLock);
+ mCropInfo = crop_info.crop_info[i].crop;
+ pthread_mutex_unlock(&mCropLock);
+
+ // update preview window crop if it's preview/postview stream
+ if ( (previewWindow != NULL) &&
+ (mStreamInfo->stream_type == CAM_STREAM_TYPE_PREVIEW ||
+ mStreamInfo->stream_type == CAM_STREAM_TYPE_POSTVIEW) ) {
+ rc = previewWindow->set_crop(previewWindow,
+ mCropInfo.left,
+ mCropInfo.top,
+ mCropInfo.width,
+ mCropInfo.height);
+ }
+ break;
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : processDataNotify
+ *
+ * DESCRIPTION: process stream data notify
+ *
+ * PARAMETERS :
+ * @frame : stream frame received
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+ LOGD("\n");
+
+ if (mDataQ.enqueue((void *)frame)) {
+ return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+ } else {
+ if (!m_bActive) {
+ LOGW("Stream thread is not active, no ops here %d", getMyType());
+ } else {
+ bufDone(frame->bufs[0]->buf_idx);
+ }
+ free(frame);
+ return NO_ERROR;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : dataNotifySYNCCB
+ *
+ * DESCRIPTION: This function registered with interface for
+ * SYNC callback if SYNC callback registered.
+ *
+ * PARAMETERS :
+ * @recvd_frame : stream frame received
+ * @userdata : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraStream::dataNotifySYNCCB(mm_camera_super_buf_t *recvd_frame,
+ void *userdata)
+{
+ LOGD("\n");
+ QCameraStream* stream = (QCameraStream *)userdata;
+ if (stream == NULL ||
+ recvd_frame == NULL ||
+ recvd_frame->bufs[0] == NULL ||
+ recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+ LOGE("Not a valid stream to handle buf");
+ return;
+ }
+ if ((stream->mSyncCBEnabled) && (stream->mSYNCDataCB != NULL))
+ stream->mSYNCDataCB(recvd_frame, stream, stream->mUserData);
+ return;
+}
+
+
+/*===========================================================================
+ * FUNCTION : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ * mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ * @recvd_frame : stream frame received
+ * userdata : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+ void *userdata)
+{
+ LOGD("\n");
+ QCameraStream* stream = (QCameraStream *)userdata;
+ if (stream == NULL ||
+ recvd_frame == NULL ||
+ recvd_frame->bufs[0] == NULL ||
+ recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+ LOGE("Not a valid stream to handle buf");
+ return;
+ }
+
+ mm_camera_super_buf_t *frame =
+ (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (frame == NULL) {
+        LOGE("No mem for mm_camera_super_buf_t");
+ stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+ return;
+ }
+ *frame = *recvd_frame;
+ stream->processDataNotify(frame);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread
+ *
+ * PARAMETERS :
+ * @data : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void *QCameraStream::dataProcRoutine(void *data)
+{
+ int running = 1;
+ int ret;
+ QCameraStream *pme = (QCameraStream *)data;
+ QCameraCmdThread *cmdThread = &pme->mProcTh;
+ cmdThread->setName("CAM_strmDatProc");
+
+ LOGD("E");
+ do {
+ do {
+ ret = cam_sem_wait(&cmdThread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ // we got notified about new cmd avail in cmd queue
+ camera_cmd_type_t cmd = cmdThread->getCmd();
+ switch (cmd) {
+ case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ {
+ LOGH("Do next job");
+ mm_camera_super_buf_t *frame =
+ (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+ if (NULL != frame) {
+ if (pme->mDataCB != NULL) {
+ pme->mDataCB(frame, pme, pme->mUserData);
+ } else {
+ // no data cb routine, return buf here
+ pme->bufDone(frame->bufs[0]->buf_idx);
+ free(frame);
+ }
+ }
+ }
+ break;
+ case CAMERA_CMD_TYPE_EXIT:
+ LOGH("Exit");
+ /* flush data buf queue */
+ pme->mDataQ.flush();
+ running = 0;
+ break;
+ default:
+ break;
+ }
+ } while (running);
+ LOGH("X");
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ * @index : index of buffer to be returned
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(uint32_t index)
+{
+ int32_t rc = NO_ERROR;
+
+ if (index >= mNumBufs || mBufDefs == NULL)
+ return BAD_INDEX;
+
+ rc = mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+
+ if (rc < 0)
+ return rc;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ * @opaque : stream frame/metadata buf to be returned
+ *   @isMetaData: flag if returned opaque is a metadata buf or the real frame ptr
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(const void *opaque, bool isMetaData)
+{
+ int32_t rc = NO_ERROR;
+ int index = -1;
+
+ if ((mStreamInfo != NULL)
+ && (mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH)
+ && (mStreamBatchBufs != NULL)) {
+ index = mStreamBatchBufs->getMatchBufIndex(opaque, isMetaData);
+ } else if (mStreamBufs != NULL){
+ index = mStreamBufs->getMatchBufIndex(opaque, isMetaData);
+ }
+
+ if (index == -1 || index >= mNumBufs || mBufDefs == NULL) {
+ LOGE("Cannot find buf for opaque data = %p", opaque);
+ return BAD_INDEX;
+ }
+
+ if ((CAMERA_MIN_VIDEO_BATCH_BUFFERS > index)
+ && mStreamMetaMemory[index].numBuffers > 0) {
+ for (int i= 0; i < mStreamMetaMemory[index].numBuffers; i++) {
+ uint8_t buf_idx = mStreamMetaMemory[index].buf_index[i];
+ bufDone((uint32_t)buf_idx);
+ }
+ mStreamMetaMemory[index].consumerOwned = FALSE;
+ mStreamMetaMemory[index].numBuffers = 0;
+ } else {
+ LOGH("Buffer Index = %d, Frame Idx = %d", index,
+ mBufDefs[index].frame_idx);
+ rc = bufDone((uint32_t)index);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumQueuedBuf
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t QCameraStream::getNumQueuedBuf()
+{
+ int32_t rc = -1;
+ if (mHandle > 0) {
+ rc = mCamOps->get_queued_buf_count(mCamHandle, mChannelHandle, mHandle);
+ }
+ if (rc == -1) {
+ LOGE("stream is not in active state. Invalid operation");
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ * @offset : offset info of stream buffers
+ * @num_bufs : number of buffers allocated
+ * @initial_reg_flag: flag to indicate if buffer needs to be registered
+ * at kernel initially
+ * @bufs : output of allocated buffers
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufs(cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int rc = NO_ERROR;
+ uint8_t *regFlags;
+
+ if (!ops_tbl) {
+ LOGE("ops_tbl is NULL");
+ return INVALID_OPERATION;
+ }
+
+ mFrameLenOffset = *offset;
+
+ uint8_t numBufAlloc = mNumBufs;
+ mNumBufsNeedAlloc = 0;
+ if (mDynBufAlloc) {
+ numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+ if (numBufAlloc > mNumBufs) {
+ mDynBufAlloc = false;
+ numBufAlloc = mNumBufs;
+ } else {
+ mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);
+ }
+ }
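+    /* Illustrative example (assumed values, with mDynBufAlloc enabled): for
+     * mNumBufs = 8 and CAMERA_MIN_ALLOCATED_BUFFERS = 3, only 3 buffers are
+     * allocated here and mNumBufsNeedAlloc = 5 more are allocated later by
+     * BufAllocRoutine. */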
+
+ /* For some stream types, buffer allocation may have already begun
+ * preemptively. If this is the case, we need to wait for the
+ * preemptive allocation to complete before proceeding. */
+ mAllocator.waitForDeferredAlloc(mStreamInfo->stream_type);
+
+ //Allocate stream buffer
+ mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+ mFrameLenOffset.frame_len, mFrameLenOffset.mp[0].stride,
+ mFrameLenOffset.mp[0].scanline, numBufAlloc);
+ if (!mStreamBufs) {
+ LOGE("Failed to allocate stream buffers");
+ return NO_MEMORY;
+ }
+
+ mNumBufs = (uint8_t)(numBufAlloc + mNumBufsNeedAlloc);
+ uint8_t numBufsToMap = mStreamBufs->getMappable();
+
+ QCameraBufferMaps bufferMaps;
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ ssize_t bufSize = mStreamBufs->getSize(i);
+ if (BAD_INDEX == bufSize) {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ return INVALID_OPERATION;
+ }
+
+ rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+ 0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+ 0 /*cookie*/, mStreamBufs->getFd(i), bufSize);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ return BAD_INDEX;
+ }
+ }
+
+ cam_buf_map_type_list bufMapList;
+ rc = bufferMaps.getCamBufMapList(bufMapList);
+ if (rc == NO_ERROR) {
+ rc = ops_tbl->bundled_map_ops(&bufMapList, ops_tbl->userdata);
+ }
+ if (rc < 0) {
+ LOGE("map_stream_buf failed: %d", rc);
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ return INVALID_OPERATION;
+ }
+
+ //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+ regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+ if (!regFlags) {
+ LOGE("Out of memory");
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ return NO_MEMORY;
+ }
+ memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+ mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+ if (mBufDefs == NULL) {
+        LOGE("Failed to allocate mm_camera_buf_def_t array");
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ free(regFlags);
+ regFlags = NULL;
+ return INVALID_OPERATION;
+ }
+ memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+ }
+
+ rc = mStreamBufs->getRegFlags(regFlags);
+ if (rc < 0) {
+ LOGE("getRegFlags failed %d", rc);
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ free(mBufDefs);
+ mBufDefs = NULL;
+ free(regFlags);
+ regFlags = NULL;
+ return INVALID_OPERATION;
+ }
+
+ *num_bufs = mNumBufs;
+ *initial_reg_flag = regFlags;
+ *bufs = mBufDefs;
+ LOGH("stream type: %d, mRegFlags: 0x%x, numBufs: %d",
+ mStreamInfo->stream_type, regFlags, mNumBufs);
+
+ if (mNumBufsNeedAlloc > 0) {
+ pthread_mutex_lock(&m_lock);
+ wait_for_cond = TRUE;
+ pthread_mutex_unlock(&m_lock);
+ LOGH("Still need to allocate %d buffers",
+ mNumBufsNeedAlloc);
+ // start another thread to allocate the rest of buffers
+ pthread_create(&mBufAllocPid,
+ NULL,
+ BufAllocRoutine,
+ this);
+ pthread_setname_np(mBufAllocPid, "CAM_strmBuf");
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getBufsDeferred
+ *
+ * DESCRIPTION: allocate deferred stream buffers
+ *
+ * PARAMETERS :
+ * @offset : offset info of stream buffers
+ * @num_bufs : number of buffers allocated
+ * @initial_reg_flag: flag to indicate if buffer needs to be registered
+ * at kernel initially
+ * @bufs : output of allocated buffers
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufsDeferred(__unused cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int32_t rc = NO_ERROR;
+ // wait for allocation
+ rc = mAllocator.waitForBackgroundTask(mAllocTaskId);
+ if (rc != NO_ERROR) {
+ LOGE("Allocation Failed");
+ return NO_MEMORY;
+ }
+
+ if (!mRegFlags || !mBufDefs) {
+ LOGE("reg flags or buf defs uninitialized");
+ return NO_MEMORY;
+ }
+
+ *initial_reg_flag = mRegFlags;
+ *num_bufs = mNumBufs;
+ *bufs = mBufDefs;
+
+ LOGH("stream type: %d, mRegFlags: 0x%x, numBufs: %d",
+ getMyType(), mRegFlags, mNumBufs);
+
+ return NO_ERROR;
+}
+/*===========================================================================
+ * FUNCTION : mapNewBuffer
+ *
+ * DESCRIPTION: map a new stream buffer
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapNewBuffer(uint32_t index)
+{
+ LOGH("E - index = %d", index);
+
+ int rc = NO_ERROR;
+
+ if (mStreamBufs == NULL) {
+ LOGE("Invalid Operation");
+ return INVALID_OPERATION;
+ }
+
+ ssize_t bufSize = mStreamBufs->getSize(index);
+ if (BAD_INDEX == bufSize) {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ return INVALID_OPERATION;
+ }
+
+ cam_buf_map_type_list bufMapList;
+ rc = QCameraBufferMaps::makeSingletonBufMapList(
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, 0 /*stream id*/, index,
+ -1 /*plane index*/, 0 /*cookie*/, mStreamBufs->getFd(index),
+ bufSize, bufMapList);
+
+ if (rc == NO_ERROR) {
+ rc = m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+ }
+ if (rc < 0) {
+ LOGE("map_stream_buf failed: %d", rc);
+ rc = INVALID_OPERATION;
+ } else {
+ mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index);
+ }
+
+ LOGH("X - rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateBuffers
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::allocateBuffers()
+{
+ int32_t rc = NO_ERROR;
+
+ mFrameLenOffset = mStreamInfo->buf_planes.plane_info;
+
+ if (mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ return allocateBatchBufs(&mFrameLenOffset,
+ &mNumBufs, &mRegFlags,
+ &mBufDefs, NULL);
+ }
+
+ /* This allocation is running in the deferred context, so it
+ * is safe (and necessary) to assume any preemptive allocation
+ * is already complete. Therefore, no need to wait here. */
+
+ uint8_t numBufAlloc = mNumBufs;
+ mNumBufsNeedAlloc = 0;
+ if (mDynBufAlloc) {
+ numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+ if (numBufAlloc > mNumBufs) {
+ mDynBufAlloc = false;
+ numBufAlloc = mNumBufs;
+ } else {
+ mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);
+ }
+ }
+
+ //Allocate and map stream info buffer
+ mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+ mFrameLenOffset.frame_len,
+ mFrameLenOffset.mp[0].stride,
+ mFrameLenOffset.mp[0].scanline,
+ numBufAlloc);
+
+ if (!mStreamBufs) {
+ LOGE("Failed to allocate stream buffers");
+ return NO_MEMORY;
+ }
+
+ mNumBufs = (uint8_t)(numBufAlloc + mNumBufsNeedAlloc);
+ uint8_t numBufsToMap = mStreamBufs->getMappable();
+
+ //regFlags array is allocated by us,
+ // but consumed and freed by mm-camera-interface
+ mRegFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+ if (!mRegFlags) {
+ LOGE("Out of memory");
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ return NO_MEMORY;
+ }
+ memset(mRegFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+ size_t bufDefsSize = mNumBufs * sizeof(mm_camera_buf_def_t);
+ mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
+ if (mBufDefs == NULL) {
+        LOGE("Failed to allocate mm_camera_buf_def_t array");
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ free(mRegFlags);
+ mRegFlags = NULL;
+ return INVALID_OPERATION;
+ }
+ memset(mBufDefs, 0, bufDefsSize);
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+ }
+
+ rc = mStreamBufs->getRegFlags(mRegFlags);
+ if (rc < 0) {
+ LOGE("getRegFlags failed %d", rc);
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ free(mBufDefs);
+ mBufDefs = NULL;
+ free(mRegFlags);
+ mRegFlags = NULL;
+ return INVALID_OPERATION;
+ }
+
+ if (mNumBufsNeedAlloc > 0) {
+ pthread_mutex_lock(&m_lock);
+ wait_for_cond = TRUE;
+ pthread_mutex_unlock(&m_lock);
+ LOGH("Still need to allocate %d buffers",
+ mNumBufsNeedAlloc);
+ // start another thread to allocate the rest of buffers
+ pthread_create(&mBufAllocPid,
+ NULL,
+ BufAllocRoutine,
+ this);
+ pthread_setname_np(mBufAllocPid, "CAM_strmBufAlloc");
+ }
+ return rc;
+}
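+
+/* Illustrative sketch (not part of the HAL): when mDynBufAlloc is set, the
+ * function above only allocates CAMERA_MIN_ALLOCATED_BUFFERS up front and
+ * records the remainder in mNumBufsNeedAlloc for BufAllocRoutine to finish in
+ * the background. Reduced to its core, the split looks like this:
+ *
+ *   uint8_t numBufAlloc = mNumBufs;              // default: allocate all now
+ *   mNumBufsNeedAlloc = 0;
+ *   if (mDynBufAlloc) {
+ *       numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+ *       if (numBufAlloc > mNumBufs) {
+ *           numBufAlloc = mNumBufs;              // minimum already covers it
+ *       } else {
+ *           mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);  // left for the thread
+ *       }
+ *   }
+ *   // allocate numBufAlloc buffers now, then, if anything is left:
+ *   pthread_create(&mBufAllocPid, NULL, BufAllocRoutine, this);
+ */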
+
+/*===========================================================================
+ * FUNCTION : mapBuffers
+ *
+ * DESCRIPTION: map stream buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuffers()
+{
+ int32_t rc = NO_ERROR;
+ QCameraBufferMaps bufferMaps;
+
+ rc = mAllocator.waitForBackgroundTask(mAllocTaskId);
+ if (rc != NO_ERROR) {
+ LOGE("Allocation Failed");
+ return NO_MEMORY;
+ }
+
+ if (mStreamBufs == NULL) {
+ LOGE("Stream buffers not allocated");
+ return UNKNOWN_ERROR;
+ }
+
+ uint8_t numBufsToMap = mStreamBufs->getMappable();
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ ssize_t bufSize = mStreamBufs->getSize(i);
+ if (BAD_INDEX != bufSize) {
+ rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF, mHandle,
+ i /*buf index*/, -1 /*plane index*/, 0 /*cookie*/,
+ mStreamBufs->getFd(i), bufSize);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ rc = BAD_INDEX;
+ break;
+ }
+ } else {
+ LOGE("Bad index %u", i);
+ rc = BAD_INDEX;
+ break;
+ }
+ }
+
+ cam_buf_map_type_list bufMapList;
+ if (rc == NO_ERROR) {
+ rc = bufferMaps.getCamBufMapList(bufMapList);
+ }
+ if (rc == NO_ERROR) {
+ rc = mapBufs(bufMapList, NULL);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : allocateBatchBufs
+ *
+ * DESCRIPTION: allocate stream batch buffers and stream buffers
+ *
+ * PARAMETERS :
+ * @offset : offset info of stream buffers
+ * @num_bufs : number of buffers allocated
+ * @initial_reg_flag: flag to indicate if buffer needs to be registered
+ * at kernel initially
+ * @bufs : output of allocated buffers
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::allocateBatchBufs(cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs, uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs, mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int rc = NO_ERROR;
+ uint8_t *regFlags;
+ QCameraBufferMaps bufferMaps;
+ QCameraBufferMaps planeBufferMaps;
+
+ mFrameLenOffset = *offset;
+
+ LOGH("Batch Buffer allocation stream type = %d", getMyType());
+
+ //Allocate stream batch buffer
+ mStreamBatchBufs = mAllocator.allocateStreamUserBuf (mStreamInfo);
+ if (!mStreamBatchBufs) {
+ LOGE("Failed to allocate stream batch buffers");
+ return NO_MEMORY;
+ }
+
+ uint8_t numBufsToMap = mStreamBatchBufs->getMappable();
+
+ //map batch buffers
+ for (uint32_t i = 0; i < numBufsToMap; i++) {
+ rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
+ 0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+ 0 /*cookie*/, mStreamBatchBufs->getFd(i), mNumBufs);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ rc = BAD_INDEX;
+ break;
+ }
+ }
+
+ cam_buf_map_type_list bufMapList;
+ if (rc == NO_ERROR) {
+ rc = bufferMaps.getCamBufMapList(bufMapList);
+ }
+ if (rc == NO_ERROR) {
+ rc = mapBufs(bufMapList, ops_tbl);
+ }
+ if (rc < 0) {
+ LOGE("Failed to map stream batch buffers");
+ mStreamBatchBufs->deallocate();
+ delete mStreamBatchBufs;
+ mStreamBatchBufs = NULL;
+ return NO_MEMORY;
+ }
+
+ /*calculate stream Buffer count*/
+ mNumPlaneBufs =
+ (mNumBufs * mStreamInfo->user_buf_info.frame_buf_cnt);
+
+ /* For some stream types, buffer allocation may have already begun
+ * preemptively. If this is the case, we need to wait for the
+ * preemptive allocation to complete before proceeding. */
+ mAllocator.waitForDeferredAlloc(mStreamInfo->stream_type);
+
+ //Allocate stream buffer
+ mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+ mFrameLenOffset.frame_len, mFrameLenOffset.mp[0].stride,
+ mFrameLenOffset.mp[0].scanline, mNumPlaneBufs);
+ if (!mStreamBufs) {
+ LOGE("Failed to allocate stream buffers");
+ rc = NO_MEMORY;
+ goto err1;
+ }
+
+ //Map plane stream buffers
+ for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+ ssize_t bufSize = mStreamBufs->getSize(i);
+ if (BAD_INDEX != bufSize) {
+ rc = planeBufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+ 0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+ 0 /*cookie*/, mStreamBufs->getFd(i), bufSize);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ rc = INVALID_OPERATION;
+ goto err1;
+ }
+ } else {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ rc = INVALID_OPERATION;
+ goto err1;
+ }
+ }
+
+ cam_buf_map_type_list planeBufMapList;
+ rc = planeBufferMaps.getCamBufMapList(planeBufMapList);
+ if (rc == NO_ERROR) {
+ rc = mapBufs(planeBufMapList, ops_tbl);
+ }
+
+ if (rc < 0) {
+ LOGE("map_stream_buf failed: %d", rc);
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ rc = INVALID_OPERATION;
+ goto err1;
+ }
+
+ LOGD("BATCH Buf Count = %d, Plane Buf Cnt = %d",
+ mNumBufs, mNumPlaneBufs);
+
+ //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+ regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+ if (!regFlags) {
+ LOGE("Out of memory");
+ for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ rc = NO_MEMORY;
+ goto err1;
+ }
+ memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+ for (uint32_t i = 0; i < mNumBufs; i++) {
+ regFlags[i] = 1;
+ }
+
+ mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+ if (mBufDefs == NULL) {
+ LOGE("Out of memory");
+ for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ free(regFlags);
+ regFlags = NULL;
+ rc = INVALID_OPERATION;
+ goto err1;
+ }
+ memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+
+ mPlaneBufDefs = (mm_camera_buf_def_t *)
+ malloc(mNumPlaneBufs * (sizeof(mm_camera_buf_def_t)));
+ if (mPlaneBufDefs == NULL) {
+ LOGE("No Memory");
+ free(regFlags);
+ regFlags = NULL;
+ free(mBufDefs);
+ mBufDefs = NULL;
+ for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+ }
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ rc = INVALID_OPERATION;
+ goto err1;
+ }
+ memset(mPlaneBufDefs, 0,
+ mNumPlaneBufs * (sizeof(mm_camera_buf_def_t)));
+
+ for (uint32_t i = 0; i < mStreamInfo->num_bufs; i++) {
+ mStreamBatchBufs->getUserBufDef(mStreamInfo->user_buf_info,
+ mBufDefs[i], i, mFrameLenOffset, mPlaneBufDefs,
+ mStreamBufs);
+ }
+
+ *num_bufs = mNumBufs;
+ *initial_reg_flag = regFlags;
+ *bufs = mBufDefs;
+ LOGH("stream type: %d, numBufs: %d mNumPlaneBufs: %d",
+ mStreamInfo->stream_type, mNumBufs, mNumPlaneBufs);
+
+ return NO_ERROR;
+
+err1:
+ mStreamBatchBufs->deallocate();
+ delete mStreamBatchBufs;
+ mStreamBatchBufs = NULL;
+ return rc;
+}
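+
+/* Worked example (illustrative numbers only): with mNumBufs = 4 batch
+ * containers and mStreamInfo->user_buf_info.frame_buf_cnt = 4 frames per
+ * container, the code above maps 4 USER_BUF entries and allocates
+ * mNumPlaneBufs = 4 * 4 = 16 plane buffers, which getUserBufDef() then
+ * distributes across the batch containers. */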
+
+
+/*===========================================================================
+ * FUNCTION : releaseBuffs
+ *
+ * DESCRIPTION: method to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseBuffs()
+{
+ int rc = NO_ERROR;
+
+ if (mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ return releaseBatchBufs(NULL);
+ }
+
+ if ((NULL != mBufDefs) && (mStreamBufs != NULL)) {
+ uint8_t numBufsToUnmap = mStreamBufs->getMappable();
+ for (uint32_t i = 0; i < numBufsToUnmap; i++) {
+ rc = unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+ if (rc < 0) {
+ LOGE("unmap_stream_buf failed: %d", rc);
+ }
+ }
+
+ // mBufDefs just keeps a ptr to the buffer
+ // mm-camera-interface owns the buffer, so no need to free
+ mBufDefs = NULL;
+ memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+ }
+ if (!mStreamBufsAcquired && (mStreamBufs != NULL)) {
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : releaseBatchBufs
+ *
+ * DESCRIPTION: method to deallocate stream buffers and batch buffers
+ *
+ * PARAMETERS :
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int rc = NO_ERROR;
+
+ if (NULL != mPlaneBufDefs) {
+ for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+ rc = unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+ if (rc < 0) {
+ LOGE("unmap_stream_buf failed: %d", rc);
+ }
+ }
+
+ // mPlaneBufDefs just keeps a ptr to the buffers
+ // mm-camera-interface owns the buffers, so no need to free
+ mPlaneBufDefs = NULL;
+ memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+ mNumPlaneBufs = 0;
+ }
+
+ if (mStreamBufs != NULL) {
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ }
+
+ mBufDefs = NULL;
+
+ if (mStreamBatchBufs != NULL) {
+ for (uint8_t i = 0; i < mStreamBatchBufs->getCnt(); i++) {
+ unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF, i, -1, ops_tbl);
+ }
+ mStreamBatchBufs->deallocate();
+ delete mStreamBatchBufs;
+ mStreamBatchBufs = NULL;
+ }
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : BufAllocRoutine
+ *
+ * DESCRIPTION: function to allocate additional stream buffers
+ *
+ * PARAMETERS :
+ * @data : user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+void *QCameraStream::BufAllocRoutine(void *data)
+{
+ QCameraStream *pme = (QCameraStream *)data;
+ int32_t rc = NO_ERROR;
+
+ LOGH("E");
+ pme->cond_wait();
+ if (pme->mNumBufsNeedAlloc > 0) {
+ uint8_t numBufAlloc = (uint8_t)(pme->mNumBufs - pme->mNumBufsNeedAlloc);
+ rc = pme->mAllocator.allocateMoreStreamBuf(pme->mStreamBufs,
+ pme->mFrameLenOffset.frame_len,
+ pme->mNumBufsNeedAlloc);
+ if (rc != NO_ERROR) {
+ LOGE("Failed to allocate buffers");
+ pme->mNumBufsNeedAlloc = 0;
+ return NULL;
+ }
+
+ pme->mNumBufsNeedAlloc = 0;
+ QCameraBufferMaps bufferMaps;
+ for (uint32_t i = numBufAlloc; i < pme->mNumBufs; i++) {
+ ssize_t bufSize = pme->mStreamBufs->getSize(i);
+ if (BAD_INDEX == bufSize) {
+ LOGE("Failed to retrieve buffer size (bad index)");
+ return NULL;
+ }
+
+ rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+ pme->mHandle, i /*buf index*/, -1 /*plane index*/,
+ 0 /*cookie*/, pme->mStreamBufs->getFd(i), bufSize);
+
+ if (rc < 0) {
+ LOGE("Failed to map buffers");
+ return NULL;
+ }
+ }
+
+ cam_buf_map_type_list bufMapList;
+ rc = bufferMaps.getCamBufMapList(bufMapList);
+ if (rc == NO_ERROR) {
+ rc = pme->m_MemOpsTbl.bundled_map_ops(&bufMapList, pme->m_MemOpsTbl.userdata);
+ }
+ if (rc != 0) {
+ LOGE("Failed to map buffers with return code %d", rc);
+ return NULL;
+ }
+
+ for (uint32_t i = numBufAlloc; i < pme->mNumBufs; i++) {
+ pme->mStreamBufs->getBufDef(pme->mFrameLenOffset, pme->mBufDefs[i], i);
+ pme->mCamOps->qbuf(pme->mCamHandle, pme->mChannelHandle,
+ &pme->mBufDefs[i]);
+ }
+ }
+ LOGH("X");
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : cond_signal
+ *
+ * DESCRIPTION: signal if flag "wait_for_cond" is set
+ *
+ *==========================================================================*/
+void QCameraStream::cond_signal(bool forceExit)
+{
+ pthread_mutex_lock(&m_lock);
+ if(wait_for_cond == TRUE){
+ wait_for_cond = FALSE;
+ if (forceExit) {
+ mNumBufsNeedAlloc = 0;
+ }
+ pthread_cond_signal(&m_cond);
+ }
+ pthread_mutex_unlock(&m_lock);
+}
+
+
+/*===========================================================================
+ * FUNCTION : cond_wait
+ *
+ * DESCRIPTION: wait while flag "wait_for_cond" is set
+ *
+ *==========================================================================*/
+void QCameraStream::cond_wait()
+{
+ pthread_mutex_lock(&m_lock);
+ while (wait_for_cond == TRUE) {
+ pthread_cond_wait(&m_cond, &m_lock);
+ }
+ pthread_mutex_unlock(&m_lock);
+}
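+
+/* Illustrative sketch (not part of the HAL): cond_wait()/cond_signal() form a
+ * simple gate that parks BufAllocRoutine until the stream releases it. The
+ * same pattern, reduced to plain pthread primitives, looks like:
+ *
+ *   pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
+ *   pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
+ *   bool gate_closed = true;
+ *
+ *   void wait_on_gate() {               // consumer side (allocation thread)
+ *       pthread_mutex_lock(&lock);
+ *       while (gate_closed) {
+ *           pthread_cond_wait(&cond, &lock);
+ *       }
+ *       pthread_mutex_unlock(&lock);
+ *   }
+ *
+ *   void open_gate() {                  // producer side (stream control path)
+ *       pthread_mutex_lock(&lock);
+ *       if (gate_closed) {
+ *           gate_closed = false;
+ *           pthread_cond_signal(&cond);
+ *       }
+ *       pthread_mutex_unlock(&lock);
+ *   }
+ */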
+
+/*===========================================================================
+ * FUNCTION : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers
+ *
+ * PARAMETERS :
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ int rc = NO_ERROR;
+
+ if (mBufAllocPid != 0) {
+ cond_signal(true);
+ LOGL("wait for buf allocation thread dead");
+ pthread_join(mBufAllocPid, NULL);
+ mBufAllocPid = 0;
+ LOGL("return from buf allocation thread");
+ }
+
+ uint8_t numBufsToUnmap = mStreamBufs->getMappable();
+ for (uint32_t i = 0; i < numBufsToUnmap; i++) {
+ rc = ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ if (rc < 0) {
+ LOGE("unmap_ops failed: %d", rc);
+ }
+ }
+ mBufDefs = NULL; // mBufDefs just keeps a ptr to the buffer
+ // mm-camera-interface owns the buffer, so no need to free
+ memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+ if ( !mStreamBufsAcquired ) {
+ mStreamBufs->deallocate();
+ delete mStreamBufs;
+ mStreamBufs = NULL;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : putBufsDeffered
+ *
+ * DESCRIPTION: function to deallocate deferred stream buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufsDeffered()
+{
+ if (mBufAllocPid != 0) {
+ cond_signal(true);
+ LOGH("%s: wait for buf allocation thread dead", __func__);
+ // Wait for the allocation of additional stream buffers
+ pthread_join(mBufAllocPid, NULL);
+ mBufAllocPid = 0;
+ LOGH("%s: return from buf allocation thread", __func__);
+ }
+ // Deallocation of the deferred stream buffers is handled separately
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : invalidateBuf
+ *
+ * DESCRIPTION: invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer to invalidate
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidateBuf(uint32_t index)
+{
+ if (mStreamBufs == NULL) {
+ LOGE("Invalid Operation");
+ return INVALID_OPERATION;
+ }
+ return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ * @index : index of the buffer to clean invalidate
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::cleanInvalidateBuf(uint32_t index)
+{
+ if (mStreamBufs == NULL) {
+ LOGE("Invalid Operation");
+ return INVALID_OPERATION;
+ }
+ return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION : isTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the stream is of the queried type
+ *
+ * PARAMETERS :
+ * @type : stream type to be queried
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+bool QCameraStream::isTypeOf(cam_stream_type_t type)
+{
+ if (mStreamInfo != NULL && (mStreamInfo->stream_type == type)) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : isOrignalTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the original stream is of the
+ * queried type, in case it is a reprocess stream
+ *
+ * PARAMETERS :
+ * @type : stream type to be queried
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+bool QCameraStream::isOrignalTypeOf(cam_stream_type_t type)
+{
+ if (mStreamInfo != NULL &&
+ mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+ mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE &&
+ mStreamInfo->reprocess_config.online.input_stream_type == type) {
+ return true;
+ } else if (
+ mStreamInfo != NULL &&
+ mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+ mStreamInfo->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE &&
+ mStreamInfo->reprocess_config.offline.input_type == type) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMyType
+ *
+ * DESCRIPTION: return stream type
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : stream type
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyType()
+{
+ if (mStreamInfo != NULL) {
+ return mStreamInfo->stream_type;
+ } else {
+ return CAM_STREAM_TYPE_DEFAULT;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getMyOriginalType
+ *
+ * DESCRIPTION: return original stream type (input type for reprocess streams)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : stream type
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyOriginalType()
+{
+ if (mStreamInfo != NULL) {
+ if (mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+ mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE) {
+ return mStreamInfo->reprocess_config.online.input_stream_type;
+ } else if (mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+ mStreamInfo->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+ return mStreamInfo->reprocess_config.offline.input_type;
+ } else {
+ return mStreamInfo->stream_type;
+ }
+ } else {
+ return CAM_STREAM_TYPE_DEFAULT;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ * @offset : reference to struct to store the queried frame offset info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+ if (NULL == mStreamInfo) {
+ return NO_INIT;
+ }
+
+ offset = mFrameLenOffset;
+ if ((ROTATE_90 == mOnlineRotation) || (ROTATE_270 == mOnlineRotation)
+ || (offset.frame_len == 0) || (offset.num_planes == 0)) {
+ // Re-calculate frame offset in case of online rotation
+ cam_stream_info_t streamInfo = *mStreamInfo;
+ getFrameDimension(streamInfo.dim);
+ calcOffset(&streamInfo);
+ offset = streamInfo.buf_planes.plane_info;
+ }
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : getCropInfo
+ *
+ * DESCRIPTION: query crop info of the stream
+ *
+ * PARAMETERS :
+ * @crop : reference to struct to store the queried crop info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getCropInfo(cam_rect_t &crop)
+{
+ pthread_mutex_lock(&mCropLock);
+ crop = mCropInfo;
+ pthread_mutex_unlock(&mCropLock);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setCropInfo
+ *
+ * DESCRIPTION: set crop info of the stream
+ *
+ * PARAMETERS :
+ * @crop : struct to store new crop info
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setCropInfo(cam_rect_t crop)
+{
+ pthread_mutex_lock(&mCropLock);
+ mCropInfo = crop;
+ pthread_mutex_unlock(&mCropLock);
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ * @dim : reference to struct to store the queried frame dimension
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameDimension(cam_dimension_t &dim)
+{
+ if (mStreamInfo != NULL) {
+ if ((ROTATE_90 == mOnlineRotation) || (ROTATE_270 == mOnlineRotation)) {
+ dim.width = mStreamInfo->dim.height;
+ dim.height = mStreamInfo->dim.width;
+ } else {
+ dim = mStreamInfo->dim;
+ }
+ return 0;
+ }
+ return -1;
+}
+
+/*===========================================================================
+ * FUNCTION : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ * @fmt : reference to stream format
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFormat(cam_format_t &fmt)
+{
+ if (mStreamInfo != NULL) {
+ fmt = mStreamInfo->fmt;
+ return 0;
+ }
+ return -1;
+}
+
+/*===========================================================================
+ * FUNCTION : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : stream ID from server
+ *==========================================================================*/
+uint32_t QCameraStream::getMyServerID() {
+ if (mStreamInfo != NULL) {
+ return mStreamInfo->stream_svr_id;
+ } else {
+ return 0;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : acquireStreamBufs
+ *
+ * DESCRIPTION: acquire stream buffers and postpone their release.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::acquireStreamBufs()
+{
+ mStreamBufsAcquired = true;
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ * @buf_type : mapping type of buffer
+ * @buf_idx : index of buffer
+ * @plane_idx: plane index
+ * @fd : fd of the buffer
+ * @size : length of the buffer
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuf(uint8_t buf_type, uint32_t buf_idx,
+ int32_t plane_idx, int fd, size_t size, mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ cam_buf_map_type_list bufMapList;
+ int32_t rc = QCameraBufferMaps::makeSingletonBufMapList(
+ (cam_mapping_buf_type)buf_type, mHandle, buf_idx, plane_idx,
+ 0 /*cookie*/, fd, size, bufMapList);
+
+ if (rc != NO_ERROR) {
+ return rc;
+ }
+
+ return mapBufs(bufMapList, ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION : mapBufs
+ *
+ * DESCRIPTION: map stream related buffers to backend server
+ *
+ * PARAMETERS :
+ * @bufMapList : buffer mapping information
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+
+int32_t QCameraStream::mapBufs(cam_buf_map_type_list bufMapList,
+ __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ if (m_MemOpsTbl.bundled_map_ops != NULL) {
+ return m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+ } else {
+ return mCamOps->map_stream_bufs(mCamHandle, mChannelHandle,
+ &bufMapList);
+ }
+
+}
+
+/*===========================================================================
+ * FUNCTION : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer to backend server
+ *
+ * PARAMETERS :
+ * @buf_type : mapping type of buffer
+ * @buf_idx : index of buffer
+ * @plane_idx: plane index
+ * @ops_tbl : ptr to buf mapping/unmapping ops
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+ if (ops_tbl != NULL) {
+ return ops_tbl->unmap_ops(buf_idx, plane_idx,
+ (cam_mapping_buf_type)buf_type, ops_tbl->userdata);
+ } else {
+ return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle,
+ mHandle, buf_type, buf_idx, plane_idx);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ * @param : ptr to parameters to be set
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setParameter(cam_stream_parm_buffer_t &param)
+{
+ int32_t rc = NO_ERROR;
+ pthread_mutex_lock(&mParameterLock);
+ mStreamInfo->parm_buf = param;
+ rc = mCamOps->set_stream_parms(mCamHandle,
+ mChannelHandle,
+ mHandle,
+ &mStreamInfo->parm_buf);
+ if (rc == NO_ERROR) {
+ param = mStreamInfo->parm_buf;
+ }
+ pthread_mutex_unlock(&mParameterLock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : getParameter
+ *
+ * DESCRIPTION: get stream based parameters
+ *
+ * PARAMETERS :
+ * @param : ptr to parameters to be read
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getParameter(cam_stream_parm_buffer_t &param)
+{
+ int32_t rc = NO_ERROR;
+ pthread_mutex_lock(&mParameterLock);
+ mStreamInfo->parm_buf = param;
+ rc = mCamOps->get_stream_parms(mCamHandle,
+ mChannelHandle,
+ mHandle,
+ &mStreamInfo->parm_buf);
+ if (rc == NO_ERROR) {
+ param = mStreamInfo->parm_buf;
+ }
+ pthread_mutex_unlock(&mParameterLock);
+ return rc;
+}
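+
+/* Usage sketch (illustrative only): callers fill a cam_stream_parm_buffer_t,
+ * set its type, and pass it by reference; on success the backend's reply is
+ * copied back into the same buffer. The parameter type below is an assumption
+ * taken from cam_types.h, and pStream stands for any valid QCameraStream
+ * pointer:
+ *
+ *   cam_stream_parm_buffer_t param;
+ *   memset(&param, 0, sizeof(param));
+ *   param.type = CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP;   // assumed enum value
+ *   if (pStream->getParameter(param) == NO_ERROR) {
+ *       // param now carries the output crop reported by the backend
+ *   }
+ */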
+
+/*===========================================================================
+ * FUNCTION : releaseFrameData
+ *
+ * DESCRIPTION: callback function to release frame data node
+ *
+ * PARAMETERS :
+ * @data : ptr to post process input data
+ * @user_data : user data ptr (QCameraStream)
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraStream::releaseFrameData(void *data, void *user_data)
+{
+ QCameraStream *pme = (QCameraStream *)user_data;
+ mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)data;
+ if (NULL != pme) {
+ pme->bufDone(frame->bufs[0]->buf_idx);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : configStream
+ *
+ * DESCRIPTION: send stream configuration to back end
+ *
+ * PARAMETERS :
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::configStream()
+{
+ int rc = NO_ERROR;
+
+ // Configure the stream
+ mm_camera_stream_config_t stream_config;
+ stream_config.stream_info = mStreamInfo;
+ stream_config.mem_vtbl = mMemVtbl;
+ stream_config.stream_cb_sync = NULL;
+ stream_config.stream_cb = dataNotifyCB;
+ stream_config.padding_info = mPaddingInfo;
+ stream_config.userdata = this;
+ rc = mCamOps->config_stream(mCamHandle,
+ mChannelHandle, mHandle, &stream_config);
+ if (rc < 0) {
+ LOGE("Failed to config stream, rc = %d", rc);
+ mCamOps->unmap_stream_buf(mCamHandle,
+ mChannelHandle,
+ mHandle,
+ CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+ 0,
+ -1);
+ return UNKNOWN_ERROR;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : setSyncDataCB
+ *
+ * DESCRIPTION: register callback with mm-interface for this stream
+ *
+ * PARAMETERS :
+ * @stream_cb : Callback function
+ *
+ * RETURN : int32_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setSyncDataCB(stream_cb_routine data_cb)
+{
+ int32_t rc = NO_ERROR;
+
+ if (mCamOps != NULL) {
+ mSYNCDataCB = data_cb;
+ rc = mCamOps->register_stream_buf_cb(mCamHandle,
+ mChannelHandle, mHandle, dataNotifySYNCCB, MM_CAMERA_STREAM_CB_TYPE_SYNC,
+ this);
+ if (rc == NO_ERROR) {
+ mSyncCBEnabled = TRUE;
+ return rc;
+ }
+ }
+ LOGE("Interface handle is NULL");
+ return UNKNOWN_ERROR;
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraStream.h b/camera/QCamera2/HAL/QCameraStream.h
new file mode 100644
index 0000000..fc5c443
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStream.h
@@ -0,0 +1,272 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STREAM_H__
+#define __QCAMERA_STREAM_H__
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraCmdThread.h"
+#include "QCameraMem.h"
+#include "QCameraAllocator.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraStream;
+typedef void (*stream_cb_routine)(mm_camera_super_buf_t *frame,
+ QCameraStream *stream,
+ void *userdata);
+
+#define CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE 16
+#define CAMERA_MIN_VIDEO_BATCH_BUFFERS 3
+
+
+class QCameraStream
+{
+public:
+ QCameraStream(QCameraAllocator &allocator,
+ uint32_t camHandle, uint32_t chId,
+ mm_camera_ops_t *camOps, cam_padding_info_t *paddingInfo,
+ bool deffered = false, cam_rotation_t online_rotation = ROTATE_0);
+ virtual ~QCameraStream();
+ virtual int32_t init(QCameraHeapMemory *streamInfoBuf,
+ QCameraHeapMemory *miscBuf,
+ uint8_t minStreamBufNum,
+ stream_cb_routine stream_cb,
+ void *userdata,
+ bool bDynallocBuf);
+ virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+ cam_crop_data_t &crop_info);
+ virtual int32_t bufDone(uint32_t index);
+ virtual int32_t bufDone(const void *opaque, bool isMetaData);
+ virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+ virtual int32_t start();
+ virtual int32_t stop();
+
+ /* Used for deferred allocation of buffers */
+ virtual int32_t allocateBuffers();
+ virtual int32_t mapBuffers();
+ virtual int32_t releaseBuffs();
+
+ static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+ static void dataNotifySYNCCB(mm_camera_super_buf_t *recvd_frame,
+ void *userdata);
+ static void *dataProcRoutine(void *data);
+ static void *BufAllocRoutine(void *data);
+ uint32_t getMyHandle() const {return mHandle;}
+ bool isTypeOf(cam_stream_type_t type);
+ bool isOrignalTypeOf(cam_stream_type_t type);
+ int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+ int32_t getCropInfo(cam_rect_t &crop);
+ int32_t setCropInfo(cam_rect_t crop);
+ int32_t getFrameDimension(cam_dimension_t &dim);
+ int32_t getFormat(cam_format_t &fmt);
+ QCameraMemory *getStreamBufs() {return mStreamBufs;};
+ QCameraHeapMemory *getStreamInfoBuf() {return mStreamInfoBuf;};
+ QCameraHeapMemory *getMiscBuf() {return mMiscBuf;};
+ uint32_t getMyServerID();
+ cam_stream_type_t getMyType();
+ cam_stream_type_t getMyOriginalType();
+ int32_t acquireStreamBufs();
+
+ int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+ int32_t plane_idx, int fd, size_t size,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+ int32_t mapBufs(cam_buf_map_type_list bufMapList,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+ int32_t mapNewBuffer(uint32_t index);
+ int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+ int32_t setParameter(cam_stream_parm_buffer_t &param);
+ int32_t getParameter(cam_stream_parm_buffer_t &param);
+ int32_t syncRuntimeParams();
+ cam_stream_parm_buffer_t getOutputCrop() { return m_OutputCrop;};
+ cam_stream_parm_buffer_t getImgProp() { return m_ImgProp;};
+
+ static void releaseFrameData(void *data, void *user_data);
+ int32_t configStream();
+ bool isDeffered() const { return mDefferedAllocation; }
+ bool isSyncCBEnabled() {return mSyncCBEnabled;};
+ void deleteStream();
+
+ uint8_t getBufferCount() { return mNumBufs; }
+ uint32_t getChannelHandle() { return mChannelHandle; }
+ int32_t getNumQueuedBuf();
+
+ uint32_t mDumpFrame;
+ uint32_t mDumpMetaFrame;
+ uint32_t mDumpSkipCnt;
+
+ void cond_wait();
+ void cond_signal(bool forceExit = false);
+
+ int32_t setSyncDataCB(stream_cb_routine data_cb);
+ //Stream time stamp. We need this for preview stream to update display
+ nsecs_t mStreamTimestamp;
+
+ //Frame buffers will be stored here in case of framework batch mode.
+ camera_memory_t *mCurMetaMemory; // Current metadata buffer ptr
+ int8_t mCurBufIndex; // Buffer count filled in current metadata
+ int8_t mCurMetaIndex; // Active metadata buffer index
+
+ nsecs_t mFirstTimeStamp; // Timestamp of first frame in Metadata.
+
+ // Buffer storage structure.
+ typedef struct {
+ bool consumerOwned; // Metadata is with Consumer if TRUE
+ uint8_t numBuffers; // Number of buffers that need to be released
+ uint8_t buf_index[CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE];
+ } MetaMemory;
+ MetaMemory mStreamMetaMemory[CAMERA_MIN_VIDEO_BATCH_BUFFERS];
+
+private:
+ uint32_t mCamHandle;
+ uint32_t mChannelHandle;
+ uint32_t mHandle; // stream handle from mm-camera-interface
+ mm_camera_ops_t *mCamOps;
+ cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+ mm_camera_stream_mem_vtbl_t mMemVtbl;
+ uint8_t mNumBufs;
+ uint8_t mNumPlaneBufs;
+ uint8_t mNumBufsNeedAlloc;
+ uint8_t *mRegFlags;
+ stream_cb_routine mDataCB;
+ stream_cb_routine mSYNCDataCB;
+ void *mUserData;
+
+ QCameraQueue mDataQ;
+ QCameraCmdThread mProcTh; // thread for dataCB
+
+ QCameraHeapMemory *mStreamInfoBuf;
+ QCameraHeapMemory *mMiscBuf;
+ QCameraMemory *mStreamBufs;
+ QCameraMemory *mStreamBatchBufs;
+ QCameraAllocator &mAllocator;
+ mm_camera_buf_def_t *mBufDefs;
+ mm_camera_buf_def_t *mPlaneBufDefs;
+ cam_frame_len_offset_t mFrameLenOffset;
+ cam_padding_info_t mPaddingInfo;
+ cam_rect_t mCropInfo;
+ cam_rotation_t mOnlineRotation;
+ pthread_mutex_t mCropLock; // lock to protect crop info
+ pthread_mutex_t mParameterLock; // lock to sync access to parameters
+ bool mStreamBufsAcquired;
+ bool m_bActive; // if stream mProcTh is active
+ bool mDynBufAlloc; // allow buf allocation in 2 steps
+ pthread_t mBufAllocPid;
+ mm_camera_map_unmap_ops_tbl_t m_MemOpsTbl;
+ cam_stream_parm_buffer_t m_OutputCrop;
+ cam_stream_parm_buffer_t m_ImgProp;
+
+ static int32_t get_bufs(
+ cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+
+ static int32_t get_bufs_deffered(
+ cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+
+ static int32_t put_bufs(
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+
+ static int32_t put_bufs_deffered(
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+
+ static int32_t set_config_ops(
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+
+ static int32_t invalidate_buf(uint32_t index, void *user_data);
+ static int32_t clean_invalidate_buf(uint32_t index, void *user_data);
+
+ static int32_t backgroundAllocate(void* data);
+ static int32_t backgroundMap(void* data);
+
+ int32_t getBufs(cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+ int32_t getBufsDeferred(cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+ int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+ int32_t putBufsDeffered();
+
+ /* Used for deferred allocation of buffers */
+ int32_t allocateBatchBufs(cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs, uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs, mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+
+ int32_t releaseBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+
+ int32_t invalidateBuf(uint32_t index);
+ int32_t cleanInvalidateBuf(uint32_t index);
+ int32_t calcOffset(cam_stream_info_t *streamInfo);
+ int32_t unmapStreamInfoBuf();
+ int32_t releaseStreamInfoBuf();
+ int32_t releaseMiscBuf();
+ int32_t mapBufs(QCameraMemory *heapBuf, cam_mapping_buf_type bufType,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+ int32_t unMapBuf(QCameraMemory *heapBuf, cam_mapping_buf_type bufType,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+
+ bool mDefferedAllocation;
+
+ bool wait_for_cond;
+ pthread_mutex_t m_lock;
+ pthread_cond_t m_cond;
+
+ BackgroundTask mAllocTask;
+ uint32_t mAllocTaskId;
+ BackgroundTask mMapTask;
+ uint32_t mMapTaskId;
+
+ bool mSyncCBEnabled;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STREAM_H__ */
diff --git a/camera/QCamera2/HAL/QCameraThermalAdapter.cpp b/camera/QCamera2/HAL/QCameraThermalAdapter.cpp
new file mode 100644
index 0000000..7579f9a
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraThermalAdapter.cpp
@@ -0,0 +1,177 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraThermalAdapter"
+
+// System dependencies
+#include <dlfcn.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+
+QCameraThermalAdapter& QCameraThermalAdapter::getInstance()
+{
+ static QCameraThermalAdapter instance;
+ return instance;
+}
+
+QCameraThermalAdapter::QCameraThermalAdapter() :
+ mCallback(NULL),
+ mHandle(NULL),
+ mRegister(NULL),
+ mUnregister(NULL),
+ mCameraHandle(0),
+ mCamcorderHandle(0)
+{
+}
+
+int QCameraThermalAdapter::init(QCameraThermalCallback *thermalCb)
+{
+ const char *error = NULL;
+ int rc = NO_ERROR;
+
+ LOGD("E");
+ mHandle = dlopen("/vendor/lib/libthermalclient.so", RTLD_NOW);
+ if (!mHandle) {
+ error = dlerror();
+ LOGE("dlopen failed with error %s",
+ error ? error : "");
+ rc = UNKNOWN_ERROR;
+ goto error;
+ }
+ *(void **)&mRegister = dlsym(mHandle, "thermal_client_register_callback");
+ if (!mRegister) {
+ error = dlerror();
+ LOGE("dlsym failed with error code %s",
+ error ? error: "");
+ rc = UNKNOWN_ERROR;
+ goto error2;
+ }
+ *(void **)&mUnregister = dlsym(mHandle, "thermal_client_unregister_callback");
+ if (!mUnregister) {
+ error = dlerror();
+ LOGE("dlsym failed with error code %s",
+ error ? error: "");
+ rc = UNKNOWN_ERROR;
+ goto error2;
+ }
+
+ mCallback = thermalCb;
+
+ // Register camera and camcorder callbacks
+ mCameraHandle = mRegister(mStrCamera, thermalCallback, NULL);
+ if (mCameraHandle < 0) {
+ LOGE("thermal_client_register_callback failed %d",
+ mCameraHandle);
+ rc = UNKNOWN_ERROR;
+ goto error2;
+ }
+ mCamcorderHandle = mRegister(mStrCamcorder, thermalCallback, NULL);
+ if (mCamcorderHandle < 0) {
+ LOGE("thermal_client_register_callback failed %d",
+ mCamcorderHandle);
+ rc = UNKNOWN_ERROR;
+ goto error3;
+ }
+
+ LOGD("X");
+ return rc;
+
+error3:
+ mCamcorderHandle = 0;
+ mUnregister(mCameraHandle);
+error2:
+ mCameraHandle = 0;
+ dlclose(mHandle);
+ mHandle = NULL;
+error:
+ LOGD("X");
+ return rc;
+}
+
+void QCameraThermalAdapter::deinit()
+{
+ LOGD("E");
+ if (mUnregister) {
+ if (mCameraHandle) {
+ mUnregister(mCameraHandle);
+ mCameraHandle = 0;
+ }
+ if (mCamcorderHandle) {
+ mUnregister(mCamcorderHandle);
+ mCamcorderHandle = 0;
+ }
+ }
+ if (mHandle)
+ dlclose(mHandle);
+
+ mHandle = NULL;
+ mRegister = NULL;
+ mUnregister = NULL;
+ mCallback = NULL;
+ LOGD("X");
+}
+
+char QCameraThermalAdapter::mStrCamera[] = "camera";
+char QCameraThermalAdapter::mStrCamcorder[] = "camcorder";
+
+int QCameraThermalAdapter::thermalCallback(int level,
+ void *userdata, void *data)
+{
+ int rc = 0;
+ LOGD("E");
+ QCameraThermalCallback *mcb = getInstance().mCallback;
+
+ if (mcb) {
+ mcb->setThermalLevel((qcamera_thermal_level_enum_t) level);
+ rc = mcb->thermalEvtHandle(mcb->getThermalLevel(), userdata, data);
+ }
+ LOGD("X");
+ return rc;
+}
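+
+/* Illustrative sketch (not part of the HAL): the dispatch above assumes a
+ * client that derives from QCameraThermalCallback (declared in
+ * QCameraThermalAdapter.h) and registers itself through init(). A minimal,
+ * hypothetical client could look like:
+ *
+ *   class SampleThermalClient : public QCameraThermalCallback {
+ *   public:
+ *       virtual int thermalEvtHandle(qcamera_thermal_level_enum_t *level,
+ *               void *userdata, void *data) {
+ *           (void)userdata; (void)data;
+ *           LOGD("thermal level is now %d", *level);
+ *           return 0;
+ *       }
+ *   };
+ *
+ *   SampleThermalClient cb;
+ *   QCameraThermalAdapter::getInstance().init(&cb);
+ *   // ... camera session runs, thermalEvtHandle() fires on thermal events ...
+ *   QCameraThermalAdapter::getInstance().deinit();
+ */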
+
+qcamera_thermal_level_enum_t *QCameraThermalCallback::getThermalLevel() {
+ return &mLevel;
+}
+
+void QCameraThermalCallback::setThermalLevel(qcamera_thermal_level_enum_t level) {
+ mLevel = level;
+}
+}; //namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraThermalAdapter.h b/camera/QCamera2/HAL/QCameraThermalAdapter.h
new file mode 100644
index 0000000..9afc90f
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraThermalAdapter.h
@@ -0,0 +1,91 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_THERMAL_ADAPTER__
+#define __QCAMERA_THERMAL_ADAPTER__
+
+namespace qcamera {
+
+typedef enum {
+ QCAMERA_THERMAL_NO_ADJUSTMENT = 0,
+ QCAMERA_THERMAL_SLIGHT_ADJUSTMENT,
+ QCAMERA_THERMAL_BIG_ADJUSTMENT,
+ QCAMERA_THERMAL_MAX_ADJUSTMENT,
+ QCAMERA_THERMAL_SHUTDOWN = 10
+} qcamera_thermal_level_enum_t;
+
+typedef enum {
+ QCAMERA_THERMAL_ADJUST_FPS,
+ QCAMERA_THERMAL_ADJUST_FRAMESKIP,
+} qcamera_thermal_mode;
+
+class QCameraThermalCallback
+{
+public:
+ virtual int thermalEvtHandle(qcamera_thermal_level_enum_t *level,
+ void *userdata, void *data) = 0;
+ virtual ~QCameraThermalCallback() {}
+ qcamera_thermal_level_enum_t *getThermalLevel();
+ void setThermalLevel(qcamera_thermal_level_enum_t level);
+
+private:
+ qcamera_thermal_level_enum_t mLevel;
+};
+
+class QCameraThermalAdapter
+{
+public:
+ static QCameraThermalAdapter& getInstance();
+
+ int init(QCameraThermalCallback *thermalCb);
+ void deinit();
+
+private:
+ static char mStrCamera[];
+ static char mStrCamcorder[];
+
+ static int thermalCallback(int level, void *userdata, void *data);
+
+ QCameraThermalCallback *mCallback;
+ void *mHandle;
+ int (*mRegister)(char *name,
+ int (*callback)(int, void *userdata, void *data), void *data);
+ int (*mUnregister)(int handle);
+ int mCameraHandle;
+ int mCamcorderHandle;
+
+ QCameraThermalAdapter();
+ QCameraThermalAdapter(QCameraThermalAdapter const& copy); // not implemented
+ QCameraThermalAdapter& operator=(QCameraThermalAdapter const& copy); // not implemented
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_THERMAL_ADAPTER__ */
diff --git a/camera/QCamera2/HAL/android/QCamera2External.h b/camera/QCamera2/HAL/android/QCamera2External.h
new file mode 100644
index 0000000..37e8f56
--- /dev/null
+++ b/camera/QCamera2/HAL/android/QCamera2External.h
@@ -0,0 +1,47 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA2EXTERNAL_H__
+#define __QCAMERA2EXTERNAL_H__
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Display dependencies
+#include "QServiceUtils.h"
+
+namespace qcamera {
+
+inline android::status_t setCameraLaunchStatus(uint32_t on) {
+ return ::setCameraLaunchStatus(on);
+}
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2EXTERNAL_H__ */
diff --git a/camera/QCamera2/HAL/test/Android.mk b/camera/QCamera2/HAL/test/Android.mk
new file mode 100644
index 0000000..3744034
--- /dev/null
+++ b/camera/QCamera2/HAL/test/Android.mk
@@ -0,0 +1,64 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ qcamera_test.cpp \
+
+LOCAL_SHARED_LIBRARIES:= \
+ libdl \
+ libui \
+ libutils \
+ libcutils \
+ libbinder \
+ libmedia \
+ libgui \
+ libcamera_client \
+ libskia \
+ libstagefright \
+ libstagefright_foundation \
+
+ifneq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 18 ))" )))
+
+LOCAL_SHARED_LIBRARIES += \
+ libmedia_native \
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_CFLAGS += -DUSE_JB_MR1
+
+endif
+
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/ui \
+ frameworks/base/include/surfaceflinger \
+ frameworks/base/include/camera \
+ frameworks/base/include/media \
+ external/skia/include/core \
+ external/skia/include/images \
+ $(TARGET_OUT_HEADERS)/qcom/display \
+ hardware/qcom/camera/QCamera2/stack/common \
+ hardware/qcom/camera/QCamera2/stack/mm-camera-interface/inc \
+ frameworks/av/include/media/stagefright \
+ frameworks/native/include/media/openmax \
+ $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_MODULE:= camera_test
+LOCAL_MODULE_TAGS:= tests
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -O0
+
+ifeq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 20 ))" )))
+
+LOCAL_CFLAGS += -DUSE_SDK_20_OR_HIGHER
+
+ifeq ($(TARGET_USES_AOSP),true)
+LOCAL_CFLAGS += -DVANILLA_HAL
+endif
+
+endif
+
+include $(BUILD_EXECUTABLE)
diff --git a/camera/QCamera2/HAL/test/qcamera_test.cpp b/camera/QCamera2/HAL/test/qcamera_test.cpp
new file mode 100644
index 0000000..dd06c67
--- /dev/null
+++ b/camera/QCamera2/HAL/test/qcamera_test.cpp
@@ -0,0 +1,3710 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+#include <semaphore.h>
+#include <pthread.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/wait.h>
+
+#include <ui/DisplayInfo.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/ISurfaceComposer.h>
+
+#include <system/camera.h>
+
+#include <camera/Camera.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
+#include <media/mediarecorder.h>
+
+#include <utils/RefBase.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <cutils/memory.h>
+#include <SkImageDecoder.h>
+#include <SkImageEncoder.h>
+#include <MediaCodec.h>
+#include <OMX_IVCommon.h>
+#include <foundation/AMessage.h>
+#include <media/ICrypto.h>
+#include <MediaMuxer.h>
+#include <foundation/ABuffer.h>
+#include <MediaErrors.h>
+#include <gralloc_priv.h>
+#include <math.h>
+
+#include "qcamera_test.h"
+#include "cam_types.h"
+#include "mm_camera_dbg.h"
+
+#define VIDEO_BUF_ALLIGN(size, allign) \
+ (((size) + (allign-1)) & (typeof(size))(~(allign-1)))
+
+namespace qcamera {
+
+using namespace android;
+
+int CameraContext::JpegIdx = 0;
+int CameraContext::mPiPIdx = 0;
+const char CameraContext::KEY_ZSL[] = "zsl";
+
+/*===========================================================================
+ * FUNCTION : previewCallback
+ *
+ * DESCRIPTION: preview callback, invoked when preview messages are enabled
+ *
+ * PARAMETERS :
+ * @mem : preview buffer
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::previewCallback(const sp<IMemory>& mem)
+{
+ printf("PREVIEW Callback %p", mem->pointer());
+ uint8_t *ptr = (uint8_t*) mem->pointer();
+ if (NULL != ptr) {
+ printf("PRV_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
+ ptr[0],
+ ptr[1],
+ ptr[2],
+ ptr[3],
+ ptr[4],
+ ptr[5],
+ ptr[6],
+ ptr[7],
+ ptr[8],
+ ptr[9]);
+ } else {
+ ALOGE(" no preview for NULL CB\n");
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : useLock
+ *
+ * DESCRIPTION: Mutex lock for CameraContext
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void CameraContext::useLock()
+{
+ Mutex::Autolock l(mLock);
+ while (mInUse) {
+ mCond.wait(mLock);
+ }
+ mInUse = true;
+}
+
+/*===========================================================================
+ * FUNCTION : signalFinished
+ *
+ * DESCRIPTION: Mutex unlock CameraContext
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void CameraContext::signalFinished()
+{
+ Mutex::Autolock l(mLock);
+ mInUse = false;
+ mCond.signal();
+}
+
+/*===========================================================================
+ * FUNCTION : saveFile
+ *
+ * DESCRIPTION: helper function for saving buffers on filesystem
+ *
+ * PARAMETERS :
+ * @mem : buffer to save to filesystem
+ * @path: File path
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::saveFile(const sp<IMemory>& mem, String8 path)
+{
+ unsigned char *buff = NULL;
+ ssize_t size;
+ int fd = -1;
+
+ if (mem == NULL) {
+ return BAD_VALUE;
+ }
+
+ fd = open(path, O_CREAT | O_WRONLY | O_TRUNC, 0655);
+ if(fd < 0) {
+ printf("Unable to open file %s %s\n", path.string(), strerror(fd));
+ return -errno;
+ }
+
+ size = (ssize_t)mem->size();
+ if (size <= 0) {
+ printf("IMemory object is of zero size\n");
+ close(fd);
+ return BAD_VALUE;
+ }
+
+ buff = (unsigned char *)mem->pointer();
+ if (!buff) {
+ printf("Buffer pointer is invalid\n");
+ close(fd);
+ return BAD_VALUE;
+ }
+
+ if (size != write(fd, buff, (size_t)size)) {
+ printf("Bad Write error (%d)%s\n", errno, strerror(errno));
+ close(fd);
+ return INVALID_OPERATION;
+ }
+
+ printf("%s: buffer=%p, size=%lld stored at %s\n",
+ __FUNCTION__, buff, (long long int) size, path.string());
+
+ if (fd >= 0)
+ close(fd);
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : PiPCopyToOneFile
+ *
+ * DESCRIPTION: Copy the smaller picture to the bigger one
+ *
+ * PARAMETERS :
+ * @bitmap0 : Decoded image buffer 0
+ * @bitmap1 : Decoded image buffer 1
+ *
+ * RETURN : decoded picture in picture in SkBitmap
+ *==========================================================================*/
+SkBitmap * CameraContext::PiPCopyToOneFile(
+ SkBitmap *bitmap0, SkBitmap *bitmap1)
+{
+ size_t size0;
+ size_t size1;
+ SkBitmap *src;
+ SkBitmap *dst;
+ unsigned int dstOffset;
+ unsigned int srcOffset;
+
+ if (bitmap0 == NULL || bitmap1 == NULL) {
+ ALOGE(" bitmap0 : %p, bitmap1 : %p\n", bitmap0, bitmap1);
+ return NULL;
+ }
+
+ size0 = bitmap0->getSize();
+ if (size0 <= 0) {
+ printf("Decoded image 0 is of zero size\n");
+ return NULL;
+ }
+
+ size1 = bitmap1->getSize();
+ if (size1 <= 0) {
+ printf("Decoded image 1 is of zero size\n");
+ return NULL;
+ }
+
+ if (size0 > size1) {
+ dst = bitmap0;
+ src = bitmap1;
+ } else if (size1 > size0){
+ dst = bitmap1;
+ src = bitmap0;
+ } else {
+ printf("Picture size should be with different size!\n");
+ return NULL;
+ }
+
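+ // Copy the smaller picture row by row into the top-left corner of the
+ // larger one; mfmtMultiplier is the bytes-per-pixel of the decoded format.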
+ for (unsigned int i = 0; i < (unsigned int)src->height(); i++) {
+ dstOffset = i * (unsigned int)dst->width() * mfmtMultiplier;
+ srcOffset = i * (unsigned int)src->width() * mfmtMultiplier;
+ memcpy(((unsigned char *)dst->getPixels()) + dstOffset,
+ ((unsigned char *)src->getPixels()) + srcOffset,
+ (unsigned int)src->width() * mfmtMultiplier);
+ }
+
+ return dst;
+}
+
+/*===========================================================================
+ * FUNCTION : decodeJPEG
+ *
+ * DESCRIPTION: decode jpeg input buffer.
+ *
+ * PARAMETERS :
+ * @mem : buffer to decode
+ * @skBM : decoded buffer
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+
+ *==========================================================================*/
+status_t CameraContext::decodeJPEG(const sp<IMemory>& mem, SkBitmap *skBM)
+{
+#ifndef USE_SDK_20_OR_HIGHER
+ SkBitmap::Config prefConfig = SkBitmap::kARGB_8888_Config;
+ const void *buff = NULL;
+ size_t size;
+
+ buff = (const void *)mem->pointer();
+ size= mem->size();
+
+ switch(prefConfig) {
+ case SkBitmap::kARGB_8888_Config:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ case SkBitmap::kARGB_4444_Config:
+ {
+ mfmtMultiplier = 2;
+ }
+ break;
+
+ case SkBitmap::kRGB_565_Config:
+ {
+ mfmtMultiplier = 2;
+ }
+ break;
+
+ case SkBitmap::kIndex8_Config:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ case SkBitmap::kA8_Config:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ default:
+ {
+ mfmtMultiplier = 0;
+ printf("Decode format is not correct!\n");
+ }
+ break;
+ }
+
+ if (SkImageDecoder::DecodeMemory(buff, size, skBM, prefConfig,
+ SkImageDecoder::kDecodePixels_Mode) == false) {
+ printf("%s():%d:: Failed during jpeg decode\n",__FUNCTION__,__LINE__);
+ return BAD_VALUE;
+ }
+#else
+ SkColorType prefConfig = kRGBA_8888_SkColorType;
+ const void *buff = NULL;
+ size_t size;
+
+ buff = (const void *)mem->pointer();
+ size= mem->size();
+
+ switch(prefConfig) {
+ case kRGBA_8888_SkColorType:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ case kBGRA_8888_SkColorType:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ case kARGB_4444_SkColorType:
+ {
+ mfmtMultiplier = 2;
+ }
+ break;
+
+ case kRGB_565_SkColorType:
+ {
+ mfmtMultiplier = 2;
+ }
+ break;
+
+ case kIndex_8_SkColorType:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ case kAlpha_8_SkColorType:
+ {
+ mfmtMultiplier = 4;
+ }
+ break;
+
+ default:
+ {
+ mfmtMultiplier = 0;
+ printf("Decode format is not correct!\n");
+ }
+ break;
+ }
+
+ if (SkImageDecoder::DecodeMemory(buff, size, skBM, prefConfig,
+ SkImageDecoder::kDecodePixels_Mode) == false) {
+ printf("%s():%d:: Failed during jpeg decode\n",__FUNCTION__,__LINE__);
+ return BAD_VALUE;
+ }
+
+#endif
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : encodeJPEG
+ *
+ * DESCRIPTION: encode the decoded input buffer.
+ *
+ * PARAMETERS :
+ * @stream : SkWStream
+ * @bitmap : SkBitmap decoded image to encode
+ * @path : File path
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+
+ *==========================================================================*/
+status_t CameraContext::encodeJPEG(SkWStream * stream,
+ const SkBitmap *bitmap, String8 path)
+{
+ int qFactor = 100;
+
+ skJpegEnc = SkImageEncoder::Create(SkImageEncoder::kJPEG_Type);
+ if (!skJpegEnc) {
+ ALOGE(" skJpegEnc is NULL\n");
+ return BAD_VALUE;
+ }
+
+ if (skJpegEnc->encodeStream(stream, *bitmap, qFactor) == false) {
+ return BAD_VALUE;
+ }
+
+ FILE *fh = fopen(path.string(), "r+");
+ if ( !fh ) {
+ printf("Could not open file %s\n", path.string());
+ return BAD_VALUE;
+ }
+
+ fseek(fh, 0, SEEK_END);
+ size_t len = (size_t)ftell(fh);
+ rewind(fh);
+
+ if( !len ) {
+ printf("File %s is empty !\n", path.string());
+ fclose(fh);
+ return BAD_VALUE;
+ }
+
+ unsigned char *buff = (unsigned char*)malloc(len);
+ if (!buff) {
+ printf("Cannot allocate memory for buffer reading!\n");
+ return BAD_VALUE;
+ }
+
+ size_t readSize = fread(buff, 1, len, fh);
+ if (readSize != len) {
+ printf("Reading error\n");
+ return BAD_VALUE;
+ }
+
+ status_t ret = ReadSectionsFromBuffer(buff, len, READ_ALL);
+ if (ret != NO_ERROR) {
+ printf("Cannot read sections from buffer\n");
+ DiscardData();
+ DiscardSections();
+ return BAD_VALUE;
+ }
+ free(buff);
+ rewind(fh);
+
+ unsigned char temp = 0xff;
+ size_t writeSize = fwrite(&temp, sizeof(unsigned char), 1, fh);
+ if (1 != writeSize) {
+ printf("Writing error\n");
+ }
+ temp = 0xd8;
+ fwrite(&temp, sizeof(unsigned char), 1, fh);
+
+ for (size_t i = 0; i < mSectionsRead; i++) {
+ switch((mSections[i].Type)) {
+
+ case 0x123:
+ fwrite(mSections[i].Data, sizeof(unsigned char),
+ mSections[i].Size, fh);
+ break;
+
+ case 0xe0:
+ temp = 0xff;
+ fwrite(&temp, sizeof(unsigned char), 1, fh);
+ temp = 0xe1;
+ fwrite(&temp, sizeof(unsigned char), 1, fh);
+ fwrite(mJEXIFSection.Data, sizeof(unsigned char),
+ mJEXIFSection.Size, fh);
+ break;
+
+ default:
+ temp = 0xff;
+ fwrite(&temp, sizeof(unsigned char), 1, fh);
+ fwrite(&mSections[i].Type, sizeof(unsigned char), 1, fh);
+ fwrite(mSections[i].Data, sizeof(unsigned char),
+ mSections[i].Size, fh);
+ break;
+ }
+ }
+ fseek(fh, 0, SEEK_END);
+ len = (size_t)ftell(fh);
+ rewind(fh);
+ printf("%s: buffer=%p, size=%zu stored at %s\n",
+ __FUNCTION__, bitmap->getPixels(), len, path.string());
+
+ free(mJEXIFSection.Data);
+ DiscardData();
+ DiscardSections();
+ fclose(fh);
+ ret = NO_ERROR;
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : ReadSectionsFromBuffer
+ *
+ * DESCRIPTION: read all jpeg sections of the input buffer.
+ *
+ * PARAMETERS :
+ * @buffer : buffer containing the jpeg sections to read
+ * @buffer_size: buffer size
+ * @ReadMode: Read mode - all, jpeg or exif
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::ReadSectionsFromBuffer (unsigned char *buffer,
+ size_t buffer_size, ReadMode_t ReadMode)
+{
+ int a;
+ size_t pos = 0;
+ int HaveCom = 0;
+ mSectionsAllocated = 10;
+
+ mSections = (Sections_t *)malloc(sizeof(Sections_t) * mSectionsAllocated);
+ if (!mSections) {
+ printf(" not enough memory\n");
+ return BAD_VALUE;
+ }
+
+ if (!buffer) {
+ printf("Input buffer is null\n");
+ return BAD_VALUE;
+ }
+
+ if (buffer_size < 1) {
+ printf("Input size is 0\n");
+ return BAD_VALUE;
+ }
+
+ a = (int) buffer[pos++];
+
+ if (a != 0xff || buffer[pos++] != M_SOI){
+ printf("No valid image\n");
+ return BAD_VALUE;
+ }
+
+ for(;;){
+ size_t itemlen;
+ int marker = 0;
+ size_t ll,lh;
+ unsigned char * Data;
+
+ CheckSectionsAllocated();
+
+ // The call to CheckSectionsAllocated() may reallocate mSections
+ // so need to check for NULL again.
+ if (mSections == NULL) {
+ printf(" not enough memory\n");
+ return BAD_VALUE;
+ }
+
+ for (a = 0; a <= 16; a++){
+ marker = buffer[pos++];
+ if (marker != 0xff) break;
+
+ if (a >= 16){
+ fprintf(stderr,"too many padding bytes\n");
+ return BAD_VALUE;
+ }
+ }
+
+ mSections[mSectionsRead].Type = marker;
+
+ // Read the length of the section.
+ lh = buffer[pos++];
+ ll = buffer[pos++];
+
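+ // JPEG marker segment lengths are big-endian and include the two length
+ // bytes themselves, so any valid value is at least 2.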
+ itemlen = (lh << 8) | ll;
+
+ if (itemlen < 2) {
+ ALOGE("invalid marker");
+ return BAD_VALUE;
+ }
+
+ mSections[mSectionsRead].Size = itemlen;
+
+ Data = (unsigned char *)malloc(itemlen);
+ if (Data == NULL) {
+ ALOGE("Could not allocate memory");
+ return NO_MEMORY;
+ }
+ mSections[mSectionsRead].Data = Data;
+
+ // Store first two pre-read bytes.
+ Data[0] = (unsigned char)lh;
+ Data[1] = (unsigned char)ll;
+
+ if (pos+itemlen-2 > buffer_size) {
+ ALOGE("Premature end of file?");
+ return BAD_VALUE;
+ }
+
+ memcpy(Data+2, buffer+pos, itemlen-2); // Read the whole section.
+ pos += itemlen-2;
+
+ mSectionsRead += 1;
+
+ switch(marker){
+
+ case M_SOS: // stop before hitting compressed data
+ // If reading entire image is requested, read the rest of the
+ // data.
+ if (ReadMode & READ_IMAGE){
+ size_t size;
+ // Determine how much file is left.
+ size = buffer_size - pos;
+
+ if (size < 1) {
+ ALOGE("could not read the rest of the image");
+ return BAD_VALUE;
+ }
+ Data = (unsigned char *)malloc(size);
+ if (Data == NULL) {
+ ALOGE("%d: could not allocate data for entire "
+ "image size: %d", __LINE__, size);
+ return BAD_VALUE;
+ }
+
+ memcpy(Data, buffer+pos, size);
+
+ CheckSectionsAllocated();
+
+ // The call to CheckSectionsAllocated()
+ // may reallocate mSections
+ // so need to check for NULL again.
+ if (mSections == NULL) {
+ printf(" not enough memory\n");
+ return BAD_VALUE;
+ }
+
+ mSections[mSectionsRead].Data = Data;
+ mSections[mSectionsRead].Size = size;
+ mSections[mSectionsRead].Type = PSEUDO_IMAGE_MARKER;
+ mSectionsRead ++;
+ mHaveAll = 1;
+ }
+ return NO_ERROR;
+
+ case M_EOI: // in case it's a tables-only JPEG stream
+ ALOGE("No image in jpeg!\n");
+ return BAD_VALUE;
+
+ case M_COM: // Comment section
+ if (HaveCom || ((ReadMode & READ_METADATA) == 0)){
+ // Discard this section.
+ free(mSections[--mSectionsRead].Data);
+ }
+ break;
+
+ case M_JFIF:
+ // Regular jpegs always have this tag, exif images have the
+ // exif marker instead, although ACDSee will write images
+ // with both markers.
+ // this program will re-create this marker on absence of exif
+ // marker.
+ // hence no need to keep the copy from the file.
+ if (ReadMode & READ_METADATA){
+ if (memcmp(Data+2, "JFIF", 4) == 0) {
+ break;
+ }
+ free(mSections[--mSectionsRead].Data);
+ }
+ break;
+
+ case M_EXIF:
+ // There can be different section using the same marker.
+ if (ReadMode & READ_METADATA){
+ if (memcmp(Data+2, "Exif", 4) == 0){
+ break;
+ }else if (memcmp(Data+2, "http:", 5) == 0){
+ // Change tag for internal purposes.
+ mSections[mSectionsRead-1].Type = M_XMP;
+ break;
+ }
+ }
+ // Otherwise, discard this section.
+ free(mSections[--mSectionsRead].Data);
+ break;
+
+ case M_IPTC:
+ if (ReadMode & READ_METADATA){
+ // Note: We just store the IPTC section.
+ // It's relatively straightforward
+ // and we don't act on any part of it,
+ // so just display it at parse time.
+ }else{
+ free(mSections[--mSectionsRead].Data);
+ }
+ break;
+
+ case M_SOF0:
+ case M_SOF1:
+ case M_SOF2:
+ case M_SOF3:
+ case M_SOF5:
+ case M_SOF6:
+ case M_SOF7:
+ case M_SOF9:
+ case M_SOF10:
+ case M_SOF11:
+ case M_SOF13:
+ case M_SOF14:
+ case M_SOF15:
+ break;
+ default:
+ // Skip any other sections.
+ break;
+ }
+ }
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : CheckSectionsAllocated
+ *
+ * DESCRIPTION: Check allocated jpeg sections.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+
+ *==========================================================================*/
+void CameraContext::CheckSectionsAllocated(void)
+{
+ if (mSectionsRead > mSectionsAllocated){
+ ALOGE("allocation screw up");
+ }
+ if (mSectionsRead >= mSectionsAllocated){
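+ // Grow the section array geometrically (to 2n + 1 entries) so that
+ // repeated reads stay amortized constant time per section.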
+ mSectionsAllocated += mSectionsAllocated +1;
+ mSections = (Sections_t *)realloc(mSections,
+ sizeof(Sections_t) * mSectionsAllocated);
+ if (mSections == NULL){
+ ALOGE("could not allocate data for entire image");
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : findSection
+ *
+ * DESCRIPTION: find the desired Section of the JPEG buffer.
+ *
+ * PARAMETERS :
+ * @SectionType: Section type
+ *
+ * RETURN : return the found section
+
+ *==========================================================================*/
+CameraContext::Sections_t *CameraContext::FindSection(int SectionType)
+{
+ for (unsigned int a = 0; a < mSectionsRead; a++) {
+ if (mSections[a].Type == SectionType){
+ return &mSections[a];
+ }
+ }
+ // Could not be found.
+ return NULL;
+}
+
+
+/*===========================================================================
+ * FUNCTION : DiscardData
+ *
+ * DESCRIPTION: Frees the data of all jpeg sections read so far
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+
+ *==========================================================================*/
+void CameraContext::DiscardData()
+{
+ for (unsigned int a = 0; a < mSectionsRead; a++) {
+ free(mSections[a].Data);
+ }
+
+ mSectionsRead = 0;
+ mHaveAll = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : DiscardSections
+ *
+ * DESCRIPTION: Discard allocated sections
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+
+ *==========================================================================*/
+void CameraContext::DiscardSections()
+{
+ free(mSections);
+ mSectionsAllocated = 0;
+ mHaveAll = 0;
+}
+
+/*===========================================================================
+ * FUNCTION : notify
+ *
+ * DESCRIPTION: notify callback
+ *
+ * PARAMETERS :
+ * @msgType : type of callback
+ * @ext1: extended parameters
+ * @ext2: extended parameters
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::notify(int32_t msgType, int32_t ext1, int32_t ext2)
+{
+ printf("Notify cb: %d %d %d\n", msgType, ext1, ext2);
+
+ if (( msgType & CAMERA_MSG_PREVIEW_FRAME)
+#ifndef VANILLA_HAL
+ && (ext1 == CAMERA_FRAME_DATA_FD)
+#endif
+ )
+ {
+ int fd = dup(ext2);
+ printf("notify Preview Frame fd: %d dup fd: %d\n", ext2, fd);
+ close(fd);
+ }
+
+ if ( msgType & CAMERA_MSG_FOCUS ) {
+ printf("AutoFocus %s \n",
+ (ext1) ? "OK" : "FAIL");
+ }
+
+ if ( msgType & CAMERA_MSG_SHUTTER ) {
+ printf("Shutter done \n");
+ }
+
+ if ( msgType & CAMERA_MSG_ERROR) {
+ printf("Camera Test CAMERA_MSG_ERROR\n");
+ stopPreview();
+ closeCamera();
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : postData
+ *
+ * DESCRIPTION: handles data callbacks
+ *
+ * PARAMETERS :
+ * @msgType : type of callback
+ * @dataPtr: buffer data
+ * @metadata: additional metadata where available
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::postData(int32_t msgType,
+ const sp<IMemory>& dataPtr,
+ camera_frame_metadata_t *metadata)
+{
+ mInterpr->PiPLock();
+ Size currentPictureSize = mSupportedPictureSizes.itemAt(
+ mCurrentPictureSizeIdx);
+ unsigned char *buff = NULL;
+ size_t size;
+ status_t ret = 0;
+
+ memset(&mJEXIFSection, 0, sizeof(mJEXIFSection));
+
+ printf("Data cb: %d\n", msgType);
+
+ if ( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
+ previewCallback(dataPtr);
+ }
+
+ if ( msgType & CAMERA_MSG_RAW_IMAGE ) {
+ printf("RAW done \n");
+ }
+
+ if (msgType & CAMERA_MSG_POSTVIEW_FRAME) {
+ printf("Postview frame \n");
+ }
+
+ if (msgType & CAMERA_MSG_COMPRESSED_IMAGE ) {
+ String8 jpegPath;
+ jpegPath = jpegPath.format(QCAMERA_DUMP_FRM_LOCATION"img_%d.jpg",
+ JpegIdx);
+ if (!mPiPCapture) {
+ // Normal capture case
+ printf("JPEG done\n");
+ saveFile(dataPtr, jpegPath);
+ JpegIdx++;
+ } else {
+ // PiP capture case
+ SkFILEWStream *wStream;
+ ret = decodeJPEG(dataPtr, &skBMtmp);
+ if (NO_ERROR != ret) {
+ printf("Error in decoding JPEG!\n");
+ mInterpr->PiPUnlock();
+ return;
+ }
+
+ mWidthTmp = currentPictureSize.width;
+ mHeightTmp = currentPictureSize.height;
+ PiPPtrTmp = dataPtr;
+ // If there are two jpeg buffers
+ if (mPiPIdx == 1) {
+ printf("PiP done\n");
+
+ // Find the capture with the higher width and height and read
+ // its jpeg sections
+ if ((mInterpr->camera[0]->mWidthTmp * mInterpr->camera[0]->mHeightTmp) >
+ (mInterpr->camera[1]->mWidthTmp * mInterpr->camera[1]->mHeightTmp)) {
+ buff = (unsigned char *)PiPPtrTmp->pointer();
+ size= PiPPtrTmp->size();
+ } else if ((mInterpr->camera[0]->mWidthTmp * mInterpr->camera[0]->mHeightTmp) <
+ (mInterpr->camera[1]->mWidthTmp * mInterpr->camera[1]->mHeightTmp)) {
+ buff = (unsigned char *)PiPPtrTmp->pointer();
+ size= PiPPtrTmp->size();
+ } else {
+ printf("Cannot take PiP. Images are with the same width"
+ " and height size!!!\n");
+ mInterpr->PiPUnlock();
+ return;
+ }
+
+ if (buff != NULL && size != 0) {
+ ret = ReadSectionsFromBuffer(buff, size, READ_ALL);
+ if (ret != NO_ERROR) {
+ printf("Cannot read sections from buffer\n");
+ DiscardData();
+ DiscardSections();
+ mInterpr->PiPUnlock();
+ return;
+ }
+
+ mJEXIFTmp = FindSection(M_EXIF);
+ if (!mJEXIFTmp) {
+ ALOGE("skBMDec is null\n");
+ DiscardData();
+ DiscardSections();
+ return;
+ }
+ mJEXIFSection = *mJEXIFTmp;
+ mJEXIFSection.Data = (unsigned char*)malloc(mJEXIFTmp->Size);
+ if (!mJEXIFSection.Data) {
+ ALOGE(" Not enough memory\n");
+ DiscardData();
+ DiscardSections();
+ return;
+ }
+ memcpy(mJEXIFSection.Data,
+ mJEXIFTmp->Data, mJEXIFTmp->Size);
+ DiscardData();
+ DiscardSections();
+
+ wStream = new SkFILEWStream(jpegPath.string());
+ skBMDec = PiPCopyToOneFile(&mInterpr->camera[0]->skBMtmp,
+ &mInterpr->camera[1]->skBMtmp);
+ if (!skBMDec) {
+ ALOGE("skBMDec is null\n");
+ delete wStream;
+ return;
+ }
+
+ if (encodeJPEG(wStream, skBMDec, jpegPath) != NO_ERROR) {
+ printf("%s():%d:: Failed during jpeg encode\n",
+ __FUNCTION__,__LINE__);
+ mInterpr->PiPUnlock();
+ return;
+ }
+ mPiPIdx = 0;
+ JpegIdx++;
+ delete wStream;
+ }
+ } else {
+ mPiPIdx++;
+ }
+ disablePiPCapture();
+ }
+ }
+
+ if ((msgType & CAMERA_MSG_PREVIEW_METADATA) && (NULL != metadata)) {
+ printf("Face detected %d \n", metadata->number_of_faces);
+ }
+ mInterpr->PiPUnlock();
+
+}
+
+/*===========================================================================
+ * FUNCTION : postDataTimestamp
+ *
+ * DESCRIPTION: handles recording callbacks
+ *
+ * PARAMETERS :
+ * @timestamp : timestamp of buffer
+ * @msgType : type of buffer
+ * @dataPtr : buffer data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::postDataTimestamp(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory>& dataPtr)
+{
+ printf("Recording cb: %d %lld %p\n",
+ msgType, (long long int)timestamp, dataPtr.get());
+}
+
+/*===========================================================================
+ * FUNCTION : dataCallbackTimestamp
+ *
+ * DESCRIPTION: handles recording callbacks. Used for ViV recording
+ *
+ * PARAMETERS :
+ * @timestamp : timestamp of buffer
+ * @msgType : type of buffer
+ * @dataPtr : buffer data
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::dataCallbackTimestamp(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory>& dataPtr)
+{
+ mInterpr->ViVLock();
+ // Not needed check. Just avoiding warnings of not used variables.
+ if (timestamp > 0)
+ timestamp = 0;
+ // Not needed check. Just avoiding warnings of not used variables.
+ if (msgType > 0)
+ msgType = 0;
+ size_t i = 0;
+ void * srcBuff = NULL;
+ void * dstBuff = NULL;
+
+ size_t srcYStride = 0, dstYStride = 0;
+ size_t srcUVStride = 0, dstUVStride = 0;
+ size_t srcYScanLines = 0, dstYScanLines = 0;
+ size_t srcUVScanLines = 0, dstUVScanLines = 0;
+ size_t srcOffset = 0, dstOffset = 0;
+ size_t srcBaseOffset = 0;
+ size_t dstBaseOffset = 0;
+ Size currentVideoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+ status_t err = NO_ERROR;
+ ANativeWindowBuffer* anb = NULL;
+
+ dstBuff = (void *) dataPtr->pointer();
+ if (NULL == dstBuff) {
+ printf("Cannot access destination buffer!!!\n");
+ mInterpr->ViVUnlock();
+ return;
+ }
+
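+ // ViV recording: the source camera caches its frame in a temporary buffer;
+ // the destination camera then overlays that frame onto its own and queues
+ // the composited result to the ViV recording surface.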
+ if (mCameraIndex == mInterpr->mViVVid.sourceCameraID) {
+ srcYStride = calcStride(currentVideoSize.width);
+ srcUVStride = calcStride(currentVideoSize.width);
+ srcYScanLines = calcYScanLines(currentVideoSize.height);
+ srcUVScanLines = calcUVScanLines(currentVideoSize.height);
+ mInterpr->mViVBuff.srcWidth = (size_t)currentVideoSize.width;
+ mInterpr->mViVBuff.srcHeight = (size_t)currentVideoSize.height;
+
+
+ mInterpr->mViVBuff.YStride = srcYStride;
+ mInterpr->mViVBuff.UVStride = srcUVStride;
+ mInterpr->mViVBuff.YScanLines = srcYScanLines;
+ mInterpr->mViVBuff.UVScanLines = srcUVScanLines;
+
+ memcpy( mInterpr->mViVBuff.buff, dstBuff,
+ mInterpr->mViVBuff.buffSize);
+
+ mInterpr->mViVVid.isBuffValid = true;
+ } else if (mCameraIndex == mInterpr->mViVVid.destinationCameraID) {
+ if(mInterpr->mViVVid.isBuffValid == true) {
+ dstYStride = calcStride(currentVideoSize.width);
+ dstUVStride = calcStride(currentVideoSize.width);
+ dstYScanLines = calcYScanLines(currentVideoSize.height);
+ dstUVScanLines = calcUVScanLines(currentVideoSize.height);
+
+ srcYStride = mInterpr->mViVBuff.YStride;
+ srcUVStride = mInterpr->mViVBuff.UVStride;
+ srcYScanLines = mInterpr->mViVBuff.YScanLines;
+ srcUVScanLines = mInterpr->mViVBuff.UVScanLines;
+
+
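+ // Frames are assumed to be semi-planar YUV 4:2:0: a full-height Y plane
+ // followed by a half-height interleaved UV plane, each row padded to its
+ // stride.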
+ for (i = 0; i < mInterpr->mViVBuff.srcHeight; i++) {
+ srcOffset = i*srcYStride;
+ dstOffset = i*dstYStride;
+ memcpy((unsigned char *) dstBuff + dstOffset,
+ (unsigned char *) mInterpr->mViVBuff.buff +
+ srcOffset, mInterpr->mViVBuff.srcWidth);
+ }
+ srcBaseOffset = srcYStride * srcYScanLines;
+ dstBaseOffset = dstYStride * dstYScanLines;
+ for (i = 0; i < mInterpr->mViVBuff.srcHeight / 2; i++) {
+ srcOffset = i*srcUVStride + srcBaseOffset;
+ dstOffset = i*dstUVStride + dstBaseOffset;
+ memcpy((unsigned char *) dstBuff + dstOffset,
+ (unsigned char *) mInterpr->mViVBuff.buff +
+ srcOffset, mInterpr->mViVBuff.srcWidth);
+ }
+
+ err = native_window_dequeue_buffer_and_wait(
+ mInterpr->mViVVid.ANW.get(),&anb);
+ if (err != NO_ERROR) {
+ printf("Cannot dequeue anb for sensor %d!!!\n", mCameraIndex);
+ mInterpr->ViVUnlock();
+ return;
+ }
+ mInterpr->mViVVid.graphBuf = new GraphicBuffer(anb, false);
+ if(NULL == mInterpr->mViVVid.graphBuf.get()) {
+ printf("Invalid Graphic buffer\n");
+ mInterpr->ViVUnlock();
+ return;
+ }
+ err = mInterpr->mViVVid.graphBuf->lock(
+ GRALLOC_USAGE_SW_WRITE_OFTEN,
+ (void**)(&mInterpr->mViVVid.mappedBuff));
+ if (err != NO_ERROR) {
+ printf("Graphic buffer could not be locked %d!!!\n", err);
+ mInterpr->ViVUnlock();
+ return;
+ }
+
+ srcYStride = dstYStride;
+ srcUVStride = dstUVStride;
+ srcYScanLines = dstYScanLines;
+ srcUVScanLines = dstUVScanLines;
+ srcBuff = dstBuff;
+
+ for (i = 0; i < (size_t)currentVideoSize.height; i++) {
+ srcOffset = i*srcYStride;
+ dstOffset = i*dstYStride;
+ memcpy((unsigned char *) mInterpr->mViVVid.mappedBuff +
+ dstOffset, (unsigned char *) srcBuff +
+ srcOffset, (size_t)currentVideoSize.width);
+ }
+
+ srcBaseOffset = srcYStride * srcYScanLines;
+ dstBaseOffset = dstUVStride * (size_t)currentVideoSize.height;
+
+ for (i = 0; i < (size_t)currentVideoSize.height / 2; i++) {
+ srcOffset = i*srcUVStride + srcBaseOffset;
+ dstOffset = i*dstUVStride + dstBaseOffset;
+ memcpy((unsigned char *) mInterpr->mViVVid.mappedBuff +
+ dstOffset, (unsigned char *) srcBuff +
+ srcOffset, (size_t)currentVideoSize.width);
+ }
+
+
+ mInterpr->mViVVid.graphBuf->unlock();
+
+ err = mInterpr->mViVVid.ANW->queueBuffer(
+ mInterpr->mViVVid.ANW.get(), anb, -1);
+ if(err)
+ printf("Failed to enqueue buffer to recorder!!!\n");
+ }
+ }
+ mCamera->releaseRecordingFrame(dataPtr);
+
+ mInterpr->ViVUnlock();
+}
+
+/*===========================================================================
+ * FUNCTION : ViVEncoderThread
+ *
+ * DESCRIPTION: Creates a separate thread for ViV recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+status_t Interpreter::ViVEncoderThread()
+{
+ int ret = NO_ERROR;
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+
+ ret = pthread_create(&mViVEncThread, &attr, ThreadWrapper, this);
+ ret = pthread_attr_destroy(&attr);
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : ThreadWrapper
+ *
+ * DESCRIPTION: Helper function for the ViV recording thread
+ *
+ * PARAMETERS : Interpreter context
+ *
+ * RETURN : None
+ *==========================================================================*/
+void *Interpreter::ThreadWrapper(void *context) {
+ Interpreter *writer = static_cast<Interpreter *>(context);
+ writer->ViVEncode();
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : ViVEncode
+ *
+ * DESCRIPTION: Thread for ViV encode. Buffers from video codec are sent to
+ * muxer and saved in a file.
+ *
+ * PARAMETERS : Interpreter context
+ *
+ * RETURN : None
+ *==========================================================================*/
+void Interpreter::ViVEncode()
+{
+ status_t err = NO_ERROR;
+ ssize_t trackIdx = -1;
+ uint32_t debugNumFrames = 0;
+
+ size_t bufIndex, offset, size;
+ int64_t ptsUsec;
+ uint32_t flags;
+ bool DoRecording = true;
+
+
+ err = mTestContext->mViVVid.codec->getOutputBuffers(
+ &mTestContext->mViVVid.buffers);
+ if (err != NO_ERROR) {
+ printf("Unable to get output buffers (err=%d)\n", err);
+ }
+
+ while (DoRecording) {
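+ // A negative timeout blocks until the codec produces an output buffer
+ // or reports an informational status change.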
+ err = mTestContext->mViVVid.codec->dequeueOutputBuffer(
+ &bufIndex,
+ &offset,
+ &size,
+ &ptsUsec,
+ &flags, -1);
+
+ switch (err) {
+
+ case NO_ERROR:
+ // got a buffer
+ if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
+ // ignore this -- we passed the CSD into MediaMuxer when
+ // we got the format change notification
+ size = 0;
+ }
+ if (size != 0) {
+ // If the virtual display isn't providing us with timestamps,
+ // use the current time.
+ if (ptsUsec == 0) {
+ ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
+ }
+
+ // The MediaMuxer docs are unclear, but it appears that we
+ // need to pass either the full set of BufferInfo flags, or
+ // (flags & BUFFER_FLAG_SYNCFRAME).
+ err = mTestContext->mViVVid.muxer->writeSampleData(
+ mTestContext->mViVVid.buffers[bufIndex],
+ (size_t)trackIdx,
+ ptsUsec,
+ flags);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Failed writing data to muxer (err=%d)\n",
+ err);
+ }
+ debugNumFrames++;
+ }
+ err = mTestContext->mViVVid.codec->releaseOutputBuffer(bufIndex);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Unable to release output buffer (err=%d)\n",
+ err);
+ }
+ if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
+ // Not expecting EOS from SurfaceFlinger. Go with it.
+ printf("Received end-of-stream\n");
+ //DoRecording = false;
+ }
+ break;
+ case -EAGAIN: // INFO_TRY_AGAIN_LATER
+ ALOGV("Got -EAGAIN, looping");
+ break;
+ case INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED
+ {
+ // format includes CSD, which we must provide to muxer
+ sp<AMessage> newFormat;
+ mTestContext->mViVVid.codec->getOutputFormat(&newFormat);
+ trackIdx = mTestContext->mViVVid.muxer->addTrack(newFormat);
+ err = mTestContext->mViVVid.muxer->start();
+ if (err != NO_ERROR) {
+ printf("Unable to start muxer (err=%d)\n", err);
+ }
+ }
+ break;
+ case INFO_OUTPUT_BUFFERS_CHANGED: // INFO_OUTPUT_BUFFERS_CHANGED
+ // not expected for an encoder; handle it anyway
+ ALOGV("Encoder buffers changed");
+ err = mTestContext->mViVVid.codec->getOutputBuffers(
+ &mTestContext->mViVVid.buffers);
+ if (err != NO_ERROR) {
+ printf("Unable to get new output buffers (err=%d)\n", err);
+ }
+ break;
+ case INVALID_OPERATION:
+ DoRecording = false;
+ break;
+ default:
+ printf("Got weird result %d from dequeueOutputBuffer\n", err);
+ break;
+ }
+ }
+
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : calcBufferSize
+ *
+ * DESCRIPTION: Temp buffer size calculation. The temp buffer stores the
+ * frame from the camera with the smaller resolution before it is
+ * copied into the buffer of the camera with the higher resolution.
+ *
+ * PARAMETERS :
+ * @width : video size width
+ * @height : video size height
+ *
+ * RETURN : size_t
+ *==========================================================================*/
+size_t CameraContext::calcBufferSize(int width, int height)
+{
+ size_t size = 0;
+ size_t UVAlignment;
+ size_t YPlane, UVPlane, YStride, UVStride, YScanlines, UVScanlines;
+ if (!width || !height) {
+ return size;
+ }
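+ // Assumed semi-planar YUV 4:2:0 layout: a padded Y plane followed by a
+ // padded interleaved UV plane, with the total rounded up to 4 KB.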
+ UVAlignment = 4096;
+ YStride = calcStride(width);
+ UVStride = calcStride(width);
+ YScanlines = calcYScanLines(height);
+ UVScanlines = calcUVScanLines(height);
+ YPlane = YStride * YScanlines;
+ UVPlane = UVStride * UVScanlines + UVAlignment;
+ size = YPlane + UVPlane;
+ size = VIDEO_BUF_ALLIGN(size, 4096);
+
+ return size;
+}
+
+/*===========================================================================
+ * FUNCTION : calcStride
+ *
+ * DESCRIPTION: Temp buffer stride calculation.
+ *
+ * PARAMETERS :
+ * @width : video size width
+ *
+ * RETURN : size_t
+ *==========================================================================*/
+size_t CameraContext::calcStride(int width)
+{
+ size_t alignment, stride = 0;
+ if (!width) {
+ return stride;
+ }
+ alignment = 128;
+ stride = VIDEO_BUF_ALLIGN((size_t)width, alignment);
+
+ return stride;
+}
+
+/*===========================================================================
+ * FUNCTION : calcYScanLines
+ *
+ * DESCRIPTION: Temp buffer scanlines calculation for Y plane.
+ *
+ * PARAMETERS :
+ * @height : video size height
+ *
+ * RETURN : size_t
+ *==========================================================================*/
+size_t CameraContext::calcYScanLines(int height)
+{
+ size_t alignment, scanlines = 0;
+ if (!height) {
+ return scanlines;
+ }
+ alignment = 32;
+ scanlines = VIDEO_BUF_ALLIGN((size_t)height, alignment);
+
+ return scanlines;
+}
+
+/*===========================================================================
+ * FUNCTION : calcUVScanLines
+ *
+ * DESCRIPTION: Temp buffer scanlines calculation for UV plane.
+ *
+ * PARAMETERS :
+ * @height : video size height
+ *
+ * RETURN : size_t
+ *==========================================================================*/
+size_t CameraContext::calcUVScanLines(int height)
+{
+ size_t alignment, scanlines = 0;
+ if (!height) {
+ return scanlines;
+ }
+ alignment = 16;
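+ // The UV plane is half the luma height (rounded up for odd heights)
+ // before the scanline alignment is applied.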
+ scanlines = VIDEO_BUF_ALLIGN((size_t)((height + 1) >> 1), alignment);
+
+ return scanlines;
+}
+
+/*===========================================================================
+ * FUNCTION : printSupportedParams
+ *
+ * DESCRIPTION: dump common supported parameters
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::printSupportedParams()
+{
+ const char *camera_ids = mParams.get("camera-indexes");
+ const char *pic_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES);
+ const char *pic_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS);
+ const char *preview_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
+ const char *video_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES);
+ const char *preview_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+ const char *frame_rates = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ const char *thumb_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES);
+ const char *wb_modes = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ const char *effects = mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS);
+ const char *scene_modes = mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES);
+ const char *focus_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ const char *antibanding_modes = mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING);
+ const char *flash_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ int focus_areas = mParams.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+ const char *fps_ranges = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE);
+ const char *focus_distances = mParams.get(CameraParameters::KEY_FOCUS_DISTANCES);
+
+ printf("\n\r\tSupported Cameras: %s",
+ (camera_ids != NULL)? camera_ids : "NULL");
+ printf("\n\r\tSupported Picture Sizes: %s",
+ (pic_sizes != NULL)? pic_sizes : "NULL");
+ printf("\n\r\tSupported Picture Formats: %s",
+ (pic_formats != NULL)? pic_formats : "NULL");
+ printf("\n\r\tSupported Preview Sizes: %s",
+ (preview_sizes != NULL)? preview_sizes : "NULL");
+ printf("\n\r\tSupported Video Sizes: %s",
+ (video_sizes != NULL)? video_sizes : "NULL");
+ printf("\n\r\tSupported Preview Formats: %s",
+ (preview_formats != NULL)? preview_formats : "NULL");
+ printf("\n\r\tSupported Preview Frame Rates: %s",
+ (frame_rates != NULL)? frame_rates : "NULL");
+ printf("\n\r\tSupported Thumbnail Sizes: %s",
+ (thumb_sizes != NULL)? thumb_sizes : "NULL");
+ printf("\n\r\tSupported Whitebalance Modes: %s",
+ (wb_modes != NULL)? wb_modes : "NULL");
+ printf("\n\r\tSupported Effects: %s",
+ (effects != NULL)? effects : "NULL");
+ printf("\n\r\tSupported Scene Modes: %s",
+ (scene_modes != NULL)? scene_modes : "NULL");
+ printf("\n\r\tSupported Focus Modes: %s",
+ (focus_modes != NULL)? focus_modes : "NULL");
+ printf("\n\r\tSupported Antibanding Options: %s",
+ (antibanding_modes != NULL)? antibanding_modes : "NULL");
+ printf("\n\r\tSupported Flash Modes: %s",
+ (flash_modes != NULL)? flash_modes : "NULL");
+ printf("\n\r\tSupported Focus Areas: %d", focus_areas);
+ printf("\n\r\tSupported FPS ranges : %s",
+ (fps_ranges != NULL)? fps_ranges : "NULL");
+ printf("\n\r\tFocus Distances: %s \n",
+ (focus_distances != NULL)? focus_distances : "NULL");
+}
+
+/*===========================================================================
+ * FUNCTION : createPreviewSurface
+ *
+ * DESCRIPTION: helper function for creating preview surfaces
+ *
+ * PARAMETERS :
+ * @width : preview width
+ * @height: preview height
+ * @pixFormat : surface pixelformat
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::createPreviewSurface(int width, int height, int32_t pixFormat)
+{
+ int ret = NO_ERROR;
+ DisplayInfo dinfo;
+ sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+ ISurfaceComposer::eDisplayIdMain));
+ SurfaceComposerClient::getDisplayInfo(display, &dinfo);
+ uint32_t previewWidth, previewHeight;
+
+ if ((0 >= width) || (0 >= height)) {
+ printf("Bad preview surface size %dx%d\n", width, height);
+ return BAD_VALUE;
+ }
+
+ if ((int)dinfo.w < width) {
+ previewWidth = dinfo.w;
+ } else {
+ previewWidth = (unsigned int)width;
+ }
+
+ if ((int)dinfo.h < height) {
+ previewHeight = dinfo.h;
+ } else {
+ previewHeight = (unsigned int)height;
+ }
+
+ mClient = new SurfaceComposerClient();
+
+ if ( NULL == mClient.get() ) {
+ printf("Unable to establish connection to Surface Composer \n");
+ return NO_INIT;
+ }
+
+ mSurfaceControl = mClient->createSurface(String8("QCamera_Test"),
+ previewWidth,
+ previewHeight,
+ pixFormat,
+ 0);
+ if ( NULL == mSurfaceControl.get() ) {
+ printf("Unable to create preview surface \n");
+ return NO_INIT;
+ }
+
+ mPreviewSurface = mSurfaceControl->getSurface();
+ if ( NULL != mPreviewSurface.get() ) {
+ mClient->openGlobalTransaction();
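+ // Maximum z-order keeps the test preview surface on top of everything.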
+ ret |= mSurfaceControl->setLayer(0x7fffffff);
+ if ( mCameraIndex == 0 )
+ ret |= mSurfaceControl->setPosition(0, 0);
+ else
+ ret |= mSurfaceControl->setPosition((float)(dinfo.w - previewWidth),
+ (float)(dinfo.h - previewHeight));
+
+ ret |= mSurfaceControl->setSize(previewWidth, previewHeight);
+ ret |= mSurfaceControl->show();
+ mClient->closeGlobalTransaction();
+
+ if ( NO_ERROR != ret ) {
+ printf("Preview surface configuration failed! \n");
+ }
+ } else {
+ ret = NO_INIT;
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : destroyPreviewSurface
+ *
+ * DESCRIPTION: closes previously open preview surface
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::destroyPreviewSurface()
+{
+ if ( NULL != mPreviewSurface.get() ) {
+ mPreviewSurface.clear();
+ }
+
+ if ( NULL != mSurfaceControl.get() ) {
+ mSurfaceControl->clear();
+ mSurfaceControl.clear();
+ }
+
+ if ( NULL != mClient.get() ) {
+ mClient->dispose();
+ mClient.clear();
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : CameraContext
+ *
+ * DESCRIPTION: camera context constructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+CameraContext::CameraContext(int cameraIndex) :
+ mCameraIndex(cameraIndex),
+ mResizePreview(true),
+ mHardwareActive(false),
+ mPreviewRunning(false),
+ mRecordRunning(false),
+ mVideoFd(-1),
+ mVideoIdx(0),
+ mRecordingHint(false),
+ mDoPrintMenu(true),
+ mPiPCapture(false),
+ mfmtMultiplier(1),
+ mSectionsRead(false),
+ mSectionsAllocated(0),
+ mSections(NULL),
+ mJEXIFTmp(NULL),
+ mHaveAll(false),
+ mCamera(NULL),
+ mClient(NULL),
+ mSurfaceControl(NULL),
+ mPreviewSurface(NULL),
+ mInUse(false)
+{
+ mRecorder = new MediaRecorder(String16("camera"));
+}
+
+/*===========================================================================
+ * FUNCTION : setTestCtxInstance
+ *
+ * DESCRIPTION : Sends TestContext instance to CameraContext
+ *
+ * PARAMETERS :
+ * @instance : TestContext instance
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::setTestCtxInstance(TestContext *instance)
+{
+ mInterpr = instance;
+}
+
+/*===========================================================================
+ * FUNCTION : setTestCtxInst
+ *
+ * DESCRIPTION : Sends TestContext instance to Interpreter
+ *
+ * PARAMETERS :
+ * @instance : TestContext instance
+ *
+ * RETURN : None
+ *==========================================================================*/
+void Interpreter::setTestCtxInst(TestContext *instance)
+{
+ mTestContext = instance;
+}
+
+/*===========================================================================
+ * FUNCTION : ~CameraContext
+ *
+ * DESCRIPTION: camera context destructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+CameraContext::~CameraContext()
+{
+ stopPreview();
+ closeCamera();
+}
+
+/*===========================================================================
+ * FUNCTION : openCamera
+ *
+ * DESCRIPTION: connects to and initializes camera
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::openCamera()
+{
+ useLock();
+ const char *ZSLStr = NULL;
+ size_t ZSLStrSize = 0;
+
+ if ( NULL != mCamera.get() ) {
+ printf("Camera already open! \n");
+ signalFinished();
+ return NO_ERROR;
+ }
+
+ printf("openCamera(camera_index=%d)\n", mCameraIndex);
+
+#ifndef USE_JB_MR1
+
+ String16 packageName("CameraTest");
+
+ mCamera = Camera::connect(mCameraIndex,
+ packageName,
+ Camera::USE_CALLING_UID);
+
+#else
+
+ mCamera = Camera::connect(mCameraIndex);
+
+#endif
+
+ if ( NULL == mCamera.get() ) {
+ printf("Unable to connect to CameraService\n");
+ signalFinished();
+ return NO_INIT;
+ }
+
+ mParams = mCamera->getParameters();
+ mParams.getSupportedPreviewSizes(mSupportedPreviewSizes);
+ mParams.getSupportedPictureSizes(mSupportedPictureSizes);
+ mParams.getSupportedVideoSizes(mSupportedVideoSizes);
+
+ mCurrentPictureSizeIdx = mSupportedPictureSizes.size() / 2;
+ mCurrentPreviewSizeIdx = mSupportedPreviewSizes.size() / 2;
+ mCurrentVideoSizeIdx = mSupportedVideoSizes.size() / 2;
+
+ mCamera->setListener(this);
+ mHardwareActive = true;
+
+ mInterpr->setViVSize((Size) mSupportedVideoSizes.itemAt(
+ mCurrentVideoSizeIdx),
+ mCameraIndex);
+
+ ZSLStr = mParams.get(CameraContext::KEY_ZSL);
+ if (NULL != ZSLStr) {
+ ZSLStrSize = strlen(ZSLStr);
+ if (!strncmp(ZSLStr, "on", ZSLStrSize)) {
+ mInterpr->mIsZSLOn = true;
+ } else if (!strncmp(ZSLStr, "off", ZSLStrSize)) {
+ mInterpr->mIsZSLOn = false;
+ } else {
+ printf("zsl value is not valid!\n");
+ }
+ } else {
+ printf("zsl is NULL\n");
+ }
+
+ signalFinished();
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : onAsBinder
+ *
+ * DESCRIPTION: onAsBinder
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : Pointer to IBinder
+ *==========================================================================*/
+IBinder* CameraContext::onAsBinder() {
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : getNumberOfCameras
+ *
+ * DESCRIPTION: returns the number of cameras supported by the system
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : supported camera count
+ *==========================================================================*/
+int CameraContext::getNumberOfCameras()
+{
+ int ret = -1;
+
+ if ( NULL != mCamera.get() ) {
+ ret = mCamera->getNumberOfCameras();
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : closeCamera
+ *
+ * DESCRIPTION: closes a previously initialized camera reference
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::closeCamera()
+{
+ useLock();
+ if ( NULL == mCamera.get() ) {
+ return NO_INIT;
+ }
+
+ mCamera->disconnect();
+ mCamera.clear();
+
+ mRecorder->init();
+ mRecorder->close();
+ mRecorder->release();
+ mRecorder.clear();
+
+ mHardwareActive = false;
+ mPreviewRunning = false;
+ mRecordRunning = false;
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : startPreview
+ *
+ * DESCRIPTION: starts camera preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startPreview()
+{
+ useLock();
+
+ int ret = NO_ERROR;
+ int previewWidth, previewHeight;
+ Size calculatedPreviewSize;
+ Size currentPreviewSize = mSupportedPreviewSizes.itemAt(
+ mCurrentPreviewSizeIdx);
+ Size currentPictureSize = mSupportedPictureSizes.itemAt(
+ mCurrentPictureSizeIdx);
+ Size currentVideoSize = mSupportedVideoSizes.itemAt(
+ mCurrentVideoSizeIdx);
+
+#ifndef USE_JB_MR1
+
+ sp<IGraphicBufferProducer> gbp;
+
+#endif
+
+ if (!mHardwareActive ) {
+ printf("Camera not active! \n");
+ return NO_INIT;
+ }
+
+ if (mPreviewRunning) {
+ printf("Preview is already running! \n");
+ signalFinished();
+ return NO_ERROR;
+ }
+
+ if (mResizePreview) {
+ mPreviewRunning = false;
+
+ if ( mRecordingHint ) {
+ calculatedPreviewSize =
+ getPreviewSizeFromVideoSizes(currentVideoSize);
+ previewWidth = calculatedPreviewSize.width;
+ previewHeight = calculatedPreviewSize.height;
+ } else {
+ previewWidth = currentPreviewSize.width;
+ previewHeight = currentPreviewSize.height;
+ }
+
+ ret = createPreviewSurface(previewWidth,
+ previewHeight,
+ HAL_PIXEL_FORMAT_YCrCb_420_SP);
+ if ( NO_ERROR != ret ) {
+ printf("Error while creating preview surface\n");
+ return ret;
+ }
+
+ // set rdi mode if system prop is set for front camera
+ if (mCameraIndex == 1) {
+ char value[32];
+ property_get("persist.camera.rdimode", value, "0");
+ int rdimode = atoi(value);
+ printf("rdi mode = %d\n", rdimode);
+ if (rdimode == 1) {
+ mParams.set("rdi-mode", "enable");
+ } else {
+ mParams.set("rdi-mode", "disable");
+ }
+ } else {
+ mParams.set("rdi-mode", "disable");
+ }
+
+ //mParams.set("rdi-mode", "enable");
+ mParams.set("recording-hint", "true");
+ mParams.setPreviewSize(previewWidth, previewHeight);
+ mParams.setPictureSize(currentPictureSize.width,
+ currentPictureSize.height);
+ mParams.setVideoSize(
+ currentVideoSize.width, currentVideoSize.height);
+
+ ret |= mCamera->setParameters(mParams.flatten());
+
+#ifndef USE_JB_MR1
+
+ gbp = mPreviewSurface->getIGraphicBufferProducer();
+ ret |= mCamera->setPreviewTarget(gbp);
+
+#else
+
+ ret |= mCamera->setPreviewDisplay(mPreviewSurface);
+
+#endif
+ mResizePreview = false;
+ }
+
+ if ( !mPreviewRunning ) {
+ ret |= mCamera->startPreview();
+ if ( NO_ERROR != ret ) {
+ printf("Preview start failed! \n");
+ return ret;
+ }
+
+ mPreviewRunning = true;
+ }
+
+ signalFinished();
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : getPreviewSizeFromVideoSizes
+ *
+ * DESCRIPTION: Get the preview size from the video size. Find all preview
+ * resolutions with the same aspect ratio and choose the same or
+ * the closest one; if none share the aspect ratio, fall back to
+ * the overall closest resolution.
+ *
+ * PARAMETERS :
+ * @currentVideoSize: current video size
+
+ *
+ * RETURN : PreviewSize
+ *==========================================================================*/
+Size CameraContext::getPreviewSizeFromVideoSizes(Size currentVideoSize)
+{
+
+ Size tmpPreviewSize;
+ Size PreviewSize;
+ Size PreviewSizes[mSupportedPreviewSizes.size()];
+ double tolerance = 0.00001;
+ double videoRatio;
+ double previewRatio;
+ size_t i = 0;
+ size_t j = 0;
+ int delta;
+
+ // Find all the resolutions with the same aspect ratio and choose the
+ // same or the closest resolution from them. Choose the closest resolution
+ // in case same aspect ratio is not found
+ if (currentVideoSize.width * currentVideoSize.height > 0 &&
+ mSupportedPreviewSizes.size() > 0) {
+ videoRatio = (float)currentVideoSize.width /
+ (float)currentVideoSize.height;
+ for (i=0; i<mSupportedPreviewSizes.size(); i++) {
+ tmpPreviewSize = mSupportedPreviewSizes.itemAt(i);
+ previewRatio = (float)tmpPreviewSize.width /
+ (float)tmpPreviewSize.height;
+ if (fabs(videoRatio - previewRatio) < tolerance) {
+ PreviewSizes[j] = tmpPreviewSize;
+ j++;
+ }
+ }
+
+ if ( j > 0 ) {
+ delta = abs((currentVideoSize.width *currentVideoSize.height)-
+ (PreviewSizes[0].width * PreviewSizes[0].height));
+ PreviewSize = PreviewSizes[0];
+ for (i=0; i<j; i++) {
+ if (abs((currentVideoSize.width * currentVideoSize.height) -
+ (PreviewSizes[i].width * PreviewSizes[i].height)) <
+ delta) {
+ PreviewSize = PreviewSizes[i];
+ delta = abs((currentVideoSize.width *
+ currentVideoSize.height) -
+ (PreviewSizes[i].width * PreviewSizes[i].height));
+ }
+ }
+ } else {
+ // Choose the closest resolution in case same aspect ratio is
+ // not found
+ tmpPreviewSize = mSupportedPreviewSizes.itemAt(j);
+ PreviewSize = tmpPreviewSize;
+ delta = abs(
+ (currentVideoSize.width * currentVideoSize.height)-
+ (tmpPreviewSize.width * tmpPreviewSize.height));
+ for (i=0; i<mSupportedPreviewSizes.size(); i++) {
+ tmpPreviewSize = mSupportedPreviewSizes.itemAt(i);
+ if(abs(
+ (currentVideoSize.width * currentVideoSize.height)-
+ (tmpPreviewSize.width * tmpPreviewSize.height)) <
+ delta) {
+ PreviewSize = tmpPreviewSize;
+ delta = abs(
+ (currentVideoSize.width * currentVideoSize.height)-
+ (tmpPreviewSize.width * tmpPreviewSize.height));
+ }
+ }
+ }
+ } else {
+ memset(&PreviewSize, 0, sizeof(PreviewSize));
+ }
+ return PreviewSize;
+}
+
+/*===========================================================================
+ * FUNCTION : autoFocus
+ *
+ * DESCRIPTION: Triggers autofocus
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::autoFocus()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ if ( mPreviewRunning ) {
+ ret = mCamera->autoFocus();
+ }
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : enablePreviewCallbacks
+ *
+ * DESCRIPTION: Enables preview callback messages
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::enablePreviewCallbacks()
+{
+ useLock();
+ if ( mHardwareActive ) {
+ mCamera->setPreviewCallbackFlags(
+ CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : takePicture
+ *
+ * DESCRIPTION: triggers image capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::takePicture()
+{
+ status_t ret = NO_ERROR;
+ useLock();
+ if ( mPreviewRunning ) {
+ ret = mCamera->takePicture(
+ CAMERA_MSG_COMPRESSED_IMAGE|
+ CAMERA_MSG_RAW_IMAGE);
+ if (!mRecordingHint && !mInterpr->mIsZSLOn) {
+ mPreviewRunning = false;
+ }
+ } else {
+ printf("Please resume/start the preview before taking a picture!\n");
+ }
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : configureRecorder
+ *
+ * DESCRIPTION: Configure video recorder
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::configureRecorder()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ mResizePreview = true;
+ mParams.set("recording-hint", "true");
+ mRecordingHint = true;
+ mCamera->setParameters(mParams.flatten());
+
+ Size videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+ ret = mRecorder->setParameters(
+ String8("video-param-encoding-bitrate=64000"));
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not configure recorder (%d)", ret);
+ return ret;
+ }
+
+ ret = mRecorder->setCamera(
+ mCamera->remote(), mCamera->getRecordingProxy());
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set camera (%d)", ret);
+ return ret;
+ }
+ ret = mRecorder->setVideoSource(VIDEO_SOURCE_CAMERA);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set video soruce (%d)", ret);
+ return ret;
+ }
+ ret = mRecorder->setAudioSource(AUDIO_SOURCE_DEFAULT);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set audio source (%d)", ret);
+ return ret;
+ }
+ ret = mRecorder->setOutputFormat(OUTPUT_FORMAT_DEFAULT);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set output format (%d)", ret);
+ return ret;
+ }
+
+ ret = mRecorder->setVideoEncoder(VIDEO_ENCODER_DEFAULT);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set video encoder (%d)", ret);
+ return ret;
+ }
+
+ char fileName[100];
+
+ snprintf(fileName, sizeof(fileName) / sizeof(char),
+ "/sdcard/vid_cam%d_%dx%d_%d.mpeg", mCameraIndex,
+ videoSize.width, videoSize.height, mVideoIdx++);
+
+ if ( mVideoFd < 0 ) {
+ mVideoFd = open(fileName, O_CREAT | O_RDWR, 0666);
+ }
+
+ if ( mVideoFd < 0 ) {
+ LOGE("Could not open video file for writing %s!", fileName);
+ return UNKNOWN_ERROR;
+ }
+
+ ret = mRecorder->setOutputFile(mVideoFd, 0, 0);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set output file (%d)", ret);
+ return ret;
+ }
+
+ ret = mRecorder->setVideoSize(videoSize.width, videoSize.height);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set video size %dx%d", videoSize.width,
+ videoSize.height);
+ return ret;
+ }
+
+ ret = mRecorder->setVideoFrameRate(30);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set video frame rate (%d)", ret);
+ return ret;
+ }
+
+ ret = mRecorder->setAudioEncoder(AUDIO_ENCODER_DEFAULT);
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not set audio encoder (%d)", ret);
+ return ret;
+ }
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : unconfigureRecorder
+ *
+ * DESCRIPTION: Unconfigures the video recorder (clears the recording hint)
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::unconfigureRecorder()
+{
+ useLock();
+
+ if ( !mRecordRunning ) {
+ mResizePreview = true;
+ mParams.set("recording-hint", "false");
+ mRecordingHint = false;
+ mCamera->setParameters(mParams.flatten());
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : configureViVRecording
+ *
+ * DESCRIPTION: Configures video in video recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::configureViVRecording()
+{
+ status_t ret = NO_ERROR;
+
+ mResizePreview = true;
+ mParams.set("recording-hint", "true");
+ mRecordingHint = true;
+ mCamera->setParameters(mParams.flatten());
+ mCamera->setRecordingProxyListener(this);
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : startRecording
+ *
+ * DESCRIPTION: triggers start recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startRecording()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+
+ if ( mPreviewRunning ) {
+
+ mCamera->unlock();
+
+ ret = mRecorder->prepare();
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not prepare recorder");
+ return ret;
+ }
+
+ ret = mRecorder->start();
+ if ( ret != NO_ERROR ) {
+ LOGE("Could not start recorder");
+ return ret;
+ }
+
+ mRecordRunning = true;
+ }
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stopRecording
+ *
+ * DESCRIPTION: triggers stop recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopRecording()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ if ( mRecordRunning ) {
+ mRecorder->stop();
+ close(mVideoFd);
+ mVideoFd = -1;
+
+ mRecordRunning = false;
+ }
+
+ signalFinished();
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : startViVRecording
+ *
+ * DESCRIPTION: Starts video in video recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startViVRecording()
+{
+ useLock();
+ status_t ret;
+
+ if (mInterpr->mViVVid.VideoSizes[0].width *
+ mInterpr->mViVVid.VideoSizes[0].height >=
+ mInterpr->mViVVid.VideoSizes[1].width *
+ mInterpr->mViVVid.VideoSizes[1].height) {
+ mInterpr->mViVBuff.buffSize = calcBufferSize(
+ mInterpr->mViVVid.VideoSizes[1].width,
+ mInterpr->mViVVid.VideoSizes[1].height);
+ if (mInterpr->mViVBuff.buff == NULL) {
+ mInterpr->mViVBuff.buff =
+ (void *)malloc(mInterpr->mViVBuff.buffSize);
+ }
+ mInterpr->mViVVid.sourceCameraID = 1;
+ mInterpr->mViVVid.destinationCameraID = 0;
+
+ } else {
+ mInterpr->mViVBuff.buffSize = calcBufferSize(
+ mInterpr->mViVVid.VideoSizes[0].width,
+ mInterpr->mViVVid.VideoSizes[0].height);
+ if (mInterpr->mViVBuff.buff == NULL) {
+ mInterpr->mViVBuff.buff =
+ (void *)malloc(mInterpr->mViVBuff.buffSize);
+ }
+ mInterpr->mViVVid.sourceCameraID = 0;
+ mInterpr->mViVVid.destinationCameraID = 1;
+ }
+
+ ret = mCamera->startRecording();
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stopViVRecording
+ *
+ * DESCRIPTION: Stops video in video recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopViVRecording()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ mCamera->stopRecording();
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : stopPreview
+ *
+ * DESCRIPTION: stops camera preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopPreview()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ if ( mHardwareActive ) {
+ mCamera->stopPreview();
+ ret = destroyPreviewSurface();
+ }
+
+ mPreviewRunning = false;
+ mResizePreview = true;
+
+ signalFinished();
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : resumePreview
+ *
+ * DESCRIPTION: resumes camera preview after image capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::resumePreview()
+{
+ useLock();
+ status_t ret = NO_ERROR;
+
+ if ( mHardwareActive ) {
+ ret = mCamera->startPreview();
+ mPreviewRunning = true;
+ } else {
+ ret = NO_INIT;
+ }
+
+ signalFinished();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : nextPreviewSize
+ *
+ * DESCRIPTION: Iterates through all supported preview sizes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPreviewSize()
+{
+ useLock();
+ if ( mHardwareActive ) {
+ mCurrentPreviewSizeIdx += 1;
+ mCurrentPreviewSizeIdx %= mSupportedPreviewSizes.size();
+ Size previewSize = mSupportedPreviewSizes.itemAt(
+ mCurrentPreviewSizeIdx);
+ mParams.setPreviewSize(previewSize.width,
+ previewSize.height);
+ mResizePreview = true;
+
+ if ( mPreviewRunning ) {
+ mCamera->stopPreview();
+ mCamera->setParameters(mParams.flatten());
+ mCamera->startPreview();
+ } else {
+ mCamera->setParameters(mParams.flatten());
+ }
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION : setPreviewSize
+ *
+ * DESCRIPTION: Sets exact preview size if supported
+ *
+ * PARAMETERS : format size in the form of WIDTHxHEIGHT
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setPreviewSize(const char *format)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ int newHeight;
+ int newWidth;
+ sscanf(format, "%dx%d", &newWidth, &newHeight);
+
+ unsigned int i;
+ for (i = 0; i < mSupportedPreviewSizes.size(); ++i) {
+ Size previewSize = mSupportedPreviewSizes.itemAt(i);
+ if ( newWidth == previewSize.width &&
+ newHeight == previewSize.height )
+ {
+ break;
+ }
+
+ }
+ if ( i == mSupportedPreviewSizes.size())
+ {
+ printf("Preview size %dx%d not supported !\n",
+ newWidth, newHeight);
+ return INVALID_OPERATION;
+ }
+
+ mParams.setPreviewSize(newWidth,
+ newHeight);
+ mResizePreview = true;
+
+ if ( mPreviewRunning ) {
+ mCamera->stopPreview();
+ mCamera->setParameters(mParams.flatten());
+ mCamera->startPreview();
+ } else {
+ mCamera->setParameters(mParams.flatten());
+ }
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getCurrentPreviewSize
+ *
+ * DESCRIPTION: queries the currently configured preview size
+ *
+ * PARAMETERS :
+ * @previewSize : preview size currently configured
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPreviewSize(Size &previewSize)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ previewSize = mSupportedPreviewSizes.itemAt(mCurrentPreviewSizeIdx);
+ }
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : nextPictureSize
+ *
+ * DESCRIPTION: Iterates through all supported picture sizes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPictureSize()
+{
+ useLock();
+ if ( mHardwareActive ) {
+ mCurrentPictureSizeIdx += 1;
+ mCurrentPictureSizeIdx %= mSupportedPictureSizes.size();
+ Size pictureSize = mSupportedPictureSizes.itemAt(
+ mCurrentPictureSizeIdx);
+ mParams.setPictureSize(pictureSize.width,
+ pictureSize.height);
+ mCamera->setParameters(mParams.flatten());
+ }
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setPictureSize
+ *
+ * DESCRIPTION: Sets exact picture size if supported
+ *
+ * PARAMETERS : format size in the form of WIDTHxHEIGHT
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setPictureSize(const char *format)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ int newHeight;
+ int newWidth;
+ sscanf(format, "%dx%d", &newWidth, &newHeight);
+
+ unsigned int i;
+ for (i = 0; i < mSupportedPictureSizes.size(); ++i) {
+ Size PictureSize = mSupportedPictureSizes.itemAt(i);
+ if ( newWidth == PictureSize.width &&
+ newHeight == PictureSize.height )
+ {
+ break;
+ }
+
+ }
+ if ( i == mSupportedPictureSizes.size())
+ {
+ printf("Preview size %dx%d not supported !\n",
+ newWidth, newHeight);
+ return INVALID_OPERATION;
+ }
+
+ mParams.setPictureSize(newWidth,
+ newHeight);
+ mCamera->setParameters(mParams.flatten());
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : nextVideoSize
+ *
+ * DESCRIPTION: Select the next available video size
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextVideoSize()
+{
+ useLock();
+ if ( mHardwareActive ) {
+ mCurrentVideoSizeIdx += 1;
+ mCurrentVideoSizeIdx %= mSupportedVideoSizes.size();
+ Size videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+ mParams.setVideoSize(videoSize.width,
+ videoSize.height);
+ mCamera->setParameters(mParams.flatten());
+ mInterpr->setViVSize((Size) mSupportedVideoSizes.itemAt(
+ mCurrentVideoSizeIdx), mCameraIndex);
+ }
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setVideoSize
+ *
+ * DESCRIPTION: Set video size
+ *
+ * PARAMETERS :
+ * @format : video size in the form of WIDTHxHEIGHT
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setVideoSize(const char *format)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ int newHeight;
+ int newWidth;
+ sscanf(format, "%dx%d", &newWidth, &newHeight);
+
+ unsigned int i;
+ for (i = 0; i < mSupportedVideoSizes.size(); ++i) {
+ Size PictureSize = mSupportedVideoSizes.itemAt(i);
+ if ( newWidth == PictureSize.width &&
+ newHeight == PictureSize.height )
+ {
+ break;
+ }
+
+ }
+ if ( i == mSupportedVideoSizes.size())
+ {
+ printf("Preview size %dx%d not supported !\n",
+ newWidth, newHeight);
+ return INVALID_OPERATION;
+ }
+
+ mParams.setVideoSize(newWidth,
+ newHeight);
+ mCamera->setParameters(mParams.flatten());
+ }
+
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getCurrentVideoSize
+ *
+ * DESCRIPTION : Get current video size
+ *
+ * PARAMETERS :
+ * @videoSize: video Size
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentVideoSize(Size &videoSize)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+ }
+ signalFinished();
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : getCurrentPictureSize
+ *
+ * DESCRIPTION: queries the currently configured picture size
+ *
+ * PARAMETERS :
+ * @pictureSize : picture size currently configured
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPictureSize(Size &pictureSize)
+{
+ useLock();
+ if ( mHardwareActive ) {
+ pictureSize = mSupportedPictureSizes.itemAt(mCurrentPictureSizeIdx);
+ }
+ signalFinished();
+ return NO_ERROR;
+}
+
+}; //namespace qcamera ends here
+
+using namespace qcamera;
+
+/*===========================================================================
+ * FUNCTION : printMenu
+ *
+ * DESCRIPTION: prints the available camera options
+ *
+ * PARAMETERS :
+ * @currentCamera : camera context currently being used
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::printMenu(sp<CameraContext> currentCamera)
+{
+ if ( !mDoPrintMenu ) return;
+ Size currentPictureSize, currentPreviewSize, currentVideoSize;
+ const char *zsl_mode = mParams.get(CameraContext::KEY_ZSL);
+
+ assert(currentCamera.get());
+
+ currentCamera->getCurrentPictureSize(currentPictureSize);
+ currentCamera->getCurrentPreviewSize(currentPreviewSize);
+ currentCamera->getCurrentVideoSize(currentVideoSize);
+
+ printf("\n\n=========== FUNCTIONAL TEST MENU ===================\n\n");
+
+ printf(" \n\nSTART / STOP / GENERAL SERVICES \n");
+ printf(" -----------------------------\n");
+ printf(" %c. Switch camera - Current Index: %d\n",
+ Interpreter::SWITCH_CAMERA_CMD,
+ currentCamera->getCameraIndex());
+ printf(" %c. Resume Preview after capture \n",
+ Interpreter::RESUME_PREVIEW_CMD);
+ printf(" %c. Quit \n",
+ Interpreter::EXIT_CMD);
+ printf(" %c. Camera Capability Dump",
+ Interpreter::DUMP_CAPS_CMD);
+
+ printf(" \n\n PREVIEW SUB MENU \n");
+ printf(" -----------------------------\n");
+ printf(" %c. Start Preview\n",
+ Interpreter::START_PREVIEW_CMD);
+ printf(" %c. Stop Preview\n",
+ Interpreter::STOP_PREVIEW_CMD);
+ printf(" %c. Preview size: %dx%d\n",
+ Interpreter::CHANGE_PREVIEW_SIZE_CMD,
+ currentPreviewSize.width,
+ currentPreviewSize.height);
+ printf(" %c. Video size: %dx%d\n",
+ Interpreter::CHANGE_VIDEO_SIZE_CMD,
+ currentVideoSize.width,
+ currentVideoSize.height);
+ printf(" %c. Start Recording\n",
+ Interpreter::START_RECORD_CMD);
+ printf(" %c. Stop Recording\n",
+ Interpreter::STOP_RECORD_CMD);
+ printf(" %c. Start ViV Recording\n",
+ Interpreter::START_VIV_RECORD_CMD);
+ printf(" %c. Stop ViV Recording\n",
+ Interpreter::STOP_VIV_RECORD_CMD);
+ printf(" %c. Enable preview frames\n",
+ Interpreter::ENABLE_PRV_CALLBACKS_CMD);
+ printf(" %c. Trigger autofocus \n",
+ Interpreter::AUTOFOCUS_CMD);
+
+ printf(" \n\n IMAGE CAPTURE SUB MENU \n");
+ printf(" -----------------------------\n");
+ printf(" %c. Take picture/Full Press\n",
+ Interpreter::TAKEPICTURE_CMD);
+ printf(" %c. Take picture in picture\n",
+ Interpreter::TAKEPICTURE_IN_PICTURE_CMD);
+ printf(" %c. Picture size: %dx%d\n",
+ Interpreter::CHANGE_PICTURE_SIZE_CMD,
+ currentPictureSize.width,
+ currentPictureSize.height);
+ printf(" %c. zsl: %s\n", Interpreter::ZSL_CMD,
+ (zsl_mode != NULL) ? zsl_mode : "NULL");
+
+ printf("\n Choice: ");
+}
+
+/*===========================================================================
+ * FUNCTION : enablePrintPreview
+ *
+ * DESCRIPTION: Enables printing the preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::enablePrintPreview()
+{
+ mDoPrintMenu = true;
+}
+
+/*===========================================================================
+ * FUNCTION : disablePrintPreview
+ *
+ * DESCRIPTION: Disables printing the preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::disablePrintPreview()
+{
+ mDoPrintMenu = false;
+}
+
+/*===========================================================================
+ * FUNCTION : enablePiPCapture
+ *
+ * DESCRIPTION: Enables picture in picture capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::enablePiPCapture()
+{
+ mPiPCapture = true;
+}
+
+/*===========================================================================
+ * FUNCTION : disablePiPCapture
+ *
+ * DESCRIPTION: Disables picture in picture capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::disablePiPCapture()
+{
+ mPiPCapture = false;
+}
+
+/*===========================================================================
+ * FUNCTION : getZSL
+ *
+ * DESCRIPTION: get ZSL value of current camera
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : current zsl value
+ *==========================================================================*/
+const char *CameraContext::getZSL()
+{
+ return mParams.get(CameraContext::KEY_ZSL);
+}
+
+/*===========================================================================
+ * FUNCTION : setZSL
+ *
+ * DESCRIPTION: set ZSL value of current camera
+ *
+ * PARAMETERS : zsl value to be set
+ *
+ * RETURN : None
+ *==========================================================================*/
+void CameraContext::setZSL(const char *value)
+{
+ mParams.set(CameraContext::KEY_ZSL, value);
+ mCamera->setParameters(mParams.flatten());
+}
+
+/*===========================================================================
+ * FUNCTION : configureViVCodec
+ *
+ * DESCRIPTION: Configures video in video codec
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t Interpreter::configureViVCodec()
+{
+ status_t ret = NO_ERROR;
+ char fileName[100];
+ sp<AMessage> format = new AMessage;
+ sp<ALooper> looper = new ALooper;
+
+ if (mTestContext->mViVVid.VideoSizes[0].width *
+ mTestContext->mViVVid.VideoSizes[0].height >=
+ mTestContext->mViVVid.VideoSizes[1].width *
+ mTestContext->mViVVid.VideoSizes[1].height) {
+ snprintf(fileName, sizeof(fileName) / sizeof(char), "/sdcard/ViV_vid_%dx%d_%d.mp4",
+ mTestContext->mViVVid.VideoSizes[0].width,
+ mTestContext->mViVVid.VideoSizes[0].height,
+ mTestContext->mViVVid.ViVIdx++);
+ format->setInt32("width", mTestContext->mViVVid.VideoSizes[0].width);
+ format->setInt32("height", mTestContext->mViVVid.VideoSizes[0].height);
+ } else {
+ snprintf(fileName, sizeof(fileName) / sizeof(char), "/sdcard/ViV_vid_%dx%d_%d.mp4",
+ mTestContext->mViVVid.VideoSizes[1].width,
+ mTestContext->mViVVid.VideoSizes[1].height,
+ mTestContext->mViVVid.ViVIdx++);
+ format->setInt32("width", mTestContext->mViVVid.VideoSizes[1].width);
+ format->setInt32("height", mTestContext->mViVVid.VideoSizes[1].height);
+ }
+ int fd = open(fileName, O_CREAT | O_RDWR, 0644); // O_CREAT needs an explicit mode
+ if (fd < 0) {
+ LOGE("Error opening file");
+ return UNKNOWN_ERROR;
+ }
+ mTestContext->mViVVid.muxer = new MediaMuxer(
+ fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+
+ format->setString("mime", "video/avc");
+ format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+
+ format->setInt32("bitrate", 1000000);
+ format->setFloat("frame-rate", 30);
+ format->setInt32("i-frame-interval", 10);
+
+ looper->setName("ViV_recording_looper");
+ looper->start();
+ ALOGV("Creating codec");
+ mTestContext->mViVVid.codec = MediaCodec::CreateByType(
+ looper, "video/avc", true);
+ if (mTestContext->mViVVid.codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
+ return UNKNOWN_ERROR;
+ }
+ ret = mTestContext->mViVVid.codec->configure(format, NULL, NULL,
+ MediaCodec::CONFIGURE_FLAG_ENCODE);
+ if (ret != NO_ERROR) {
+ mTestContext->mViVVid.codec->release();
+ mTestContext->mViVVid.codec.clear();
+
+ fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", ret);
+ return ret;
+ }
+
+ ALOGV("Creating buffer producer");
+ ret = mTestContext->mViVVid.codec->createInputSurface(
+ &mTestContext->mViVVid.bufferProducer);
+ if (ret != NO_ERROR) {
+ mTestContext->mViVVid.codec->release();
+ mTestContext->mViVVid.codec.clear();
+
+ fprintf(stderr,
+ "ERROR: unable to create encoder input surface (err=%d)\n", ret);
+ return ret;
+ }
+
+ ret = mTestContext->mViVVid.codec->start();
+ if (ret != NO_ERROR) {
+ mTestContext->mViVVid.codec->release();
+ mTestContext->mViVVid.codec.clear();
+
+ fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", ret);
+ return ret;
+ }
+ ALOGV("Codec prepared");
+
+ mTestContext->mViVVid.surface = new Surface(
+ mTestContext->mViVVid.bufferProducer);
+ mTestContext->mViVVid.ANW = mTestContext->mViVVid.surface;
+ ret = native_window_api_connect(mTestContext->mViVVid.ANW.get(),
+ NATIVE_WINDOW_API_CPU);
+ if (mTestContext->mViVVid.VideoSizes[0].width *
+ mTestContext->mViVVid.VideoSizes[0].height >=
+ mTestContext->mViVVid.VideoSizes[1].width *
+ mTestContext->mViVVid.VideoSizes[1].height) {
+ native_window_set_buffers_format(mTestContext->mViVVid.ANW.get(),
+ HAL_PIXEL_FORMAT_NV12_ENCODEABLE);
+ native_window_set_buffers_dimensions(mTestContext->mViVVid.ANW.get(),
+ mTestContext->mViVVid.VideoSizes[0].width,
+ mTestContext->mViVVid.VideoSizes[0].height);
+ } else {
+ native_window_set_buffers_format(mTestContext->mViVVid.ANW.get(),
+ HAL_PIXEL_FORMAT_NV12_ENCODEABLE);
+ native_window_set_buffers_dimensions(mTestContext->mViVVid.ANW.get(),
+ mTestContext->mViVVid.VideoSizes[1].width,
+ mTestContext->mViVVid.VideoSizes[1].height);
+ }
+ native_window_set_usage(mTestContext->mViVVid.ANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
+ native_window_set_buffer_count(mTestContext->mViVVid.ANW.get(),
+ mTestContext->mViVVid.buff_cnt);
+
+ ViVEncoderThread();
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : unconfigureViVCodec
+ *
+ * DESCRIPTION: Unconfigures video in video codec
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t Interpreter::unconfigureViVCodec()
+{
+ status_t ret = NO_ERROR;
+
+ ret = native_window_api_disconnect(mTestContext->mViVVid.ANW.get(),
+ NATIVE_WINDOW_API_CPU);
+ mTestContext->mViVVid.bufferProducer = NULL;
+ mTestContext->mViVVid.codec->stop();
+ pthread_join(mViVEncThread, NULL);
+ mTestContext->mViVVid.muxer->stop();
+ mTestContext->mViVVid.codec->release();
+ mTestContext->mViVVid.codec.clear();
+ mTestContext->mViVVid.muxer.clear();
+ mTestContext->mViVVid.surface.clear();
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : Interpreter
+ *
+ * DESCRIPTION: Interpreter constructor
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+Interpreter::Interpreter(const char *file)
+ : mCmdIndex(0)
+ , mScript(NULL)
+{
+ if (!file){
+ printf("no File Given\n");
+ mUseScript = false;
+ return;
+ }
+
+ FILE *fh = fopen(file, "r");
+ if ( !fh ) {
+ printf("Could not open file %s\n", file);
+ mUseScript = false;
+ return;
+ }
+
+ fseek(fh, 0, SEEK_END);
+ size_t len = (size_t)ftell(fh);
+ rewind(fh);
+
+ if ( !len ) {
+ printf("Script file %s is empty !\n", file);
+ fclose(fh);
+ mUseScript = false;
+ return;
+ }
+
+ mScript = new char[len + 1];
+ if ( !mScript ) {
+ fclose(fh);
+ return;
+ }
+
+ fread(mScript, sizeof(char), len, fh);
+ mScript[len] = '\0'; // ensure null terminated;
+ fclose(fh);
+
+
+ char *p1;
+ char *p2;
+ p1 = p2 = mScript;
+
+ do {
+ switch (*p1) {
+ case '\0':
+ case '|':
+ p1++;
+ break;
+ case SWITCH_CAMERA_CMD:
+ case RESUME_PREVIEW_CMD:
+ case START_PREVIEW_CMD:
+ case STOP_PREVIEW_CMD:
+ case CHANGE_PREVIEW_SIZE_CMD:
+ case CHANGE_PICTURE_SIZE_CMD:
+ case START_RECORD_CMD:
+ case STOP_RECORD_CMD:
+ case START_VIV_RECORD_CMD:
+ case STOP_VIV_RECORD_CMD:
+ case DUMP_CAPS_CMD:
+ case AUTOFOCUS_CMD:
+ case TAKEPICTURE_CMD:
+ case TAKEPICTURE_IN_PICTURE_CMD:
+ case ENABLE_PRV_CALLBACKS_CMD:
+ case EXIT_CMD:
+ case ZSL_CMD:
+ case DELAY:
+ p2 = p1;
+ while( (p2 != (mScript + len)) && (*p2 != '|')) {
+ p2++;
+ }
+ *p2 = '\0';
+ if (p2 == (p1 + 1))
+ mCommands.push_back(Command(
+ static_cast<Interpreter::Commands_e>(*p1)));
+ else
+ mCommands.push_back(Command(
+ static_cast<Interpreter::Commands_e>(*p1), (p1 + 1)));
+ p1 = p2;
+ break;
+ default:
+ printf("Invalid cmd %c \n", *p1);
+ do {
+ p1++;
+
+ } while(*p1 != '|' && p1 != (mScript + len));
+
+ }
+ } while(p1 != (mScript + len));
+ mUseScript = true;
+}
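For reference, the constructor above parses the script as a '|'-separated list of
single-character commands, with any argument appended directly after the command
character (the command characters are defined in Interpreter::Commands_e in
qcamera_test.h, included further below in this change). A purely hypothetical
script, shown only to illustrate the format and assuming 1280x720 is a supported
preview size, could look like:

    1|d2000|41280x720|d1000|p|d3000|2|q

i.e. start preview, wait 2000 ms, set the preview size to 1280x720, wait, take a
picture, wait, stop preview and quit.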
+
+/*===========================================================================
+ * FUNCTION : ~Interpreter
+ *
+ * DESCRIPTION: Interpreter destructor
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+Interpreter::~Interpreter()
+{
+ if ( mScript )
+ delete[] mScript;
+
+ mCommands.clear();
+}
+
+/*===========================================================================
+ * FUNCTION : getCommand
+ *
+ * DESCRIPTION : Get a command from interpreter
+ *
+ * PARAMETERS :
+ * @currentCamera: Current camera context
+ *
+ * RETURN : command
+ *==========================================================================*/
+Interpreter::Command Interpreter::getCommand(
+ sp<CameraContext> currentCamera)
+{
+ if( mUseScript ) {
+ return mCommands[mCmdIndex++];
+ } else {
+ currentCamera->printMenu(currentCamera);
+ return Interpreter::Command(
+ static_cast<Interpreter::Commands_e>(getchar()));
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : TestContext
+ *
+ * DESCRIPTION : TestContext constructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+TestContext::TestContext()
+{
+ int i = 0;
+ mTestRunning = false;
+ mInterpreter = NULL;
+ mViVVid.ViVIdx = 0;
+ mViVVid.buff_cnt = 9;
+ mViVVid.graphBuf = 0;
+ mViVVid.mappedBuff = NULL;
+ mViVVid.isBuffValid = false;
+ mViVVid.sourceCameraID = -1;
+ mViVVid.destinationCameraID = -1;
+ mPiPinUse = false;
+ mViVinUse = false;
+ mIsZSLOn = false;
+ memset(&mViVBuff, 0, sizeof(ViVBuff_t));
+
+ ProcessState::self()->startThreadPool();
+
+ do {
+ camera[i] = new CameraContext(i);
+ if ( NULL == camera[i].get() ) {
+ break;
+ }
+ camera[i]->setTestCtxInstance(this);
+
+ //by default open only back camera
+ if (i==0) {
+ status_t stat = camera[i]->openCamera();
+ if ( NO_ERROR != stat ) {
+ printf("Error encountered Openging camera id : %d\n", i);
+ break;
+ }
+ }
+ mAvailableCameras.add(camera[i]);
+ i++;
+ } while ( i < camera[0]->getNumberOfCameras() ) ;
+
+ if (i < camera[0]->getNumberOfCameras() ) {
+ for (size_t j = 0; j < mAvailableCameras.size(); j++) {
+ camera[j] = mAvailableCameras.itemAt(j);
+ camera[j]->closeCamera();
+ camera[j].clear();
+ }
+
+ mAvailableCameras.clear();
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : ~TestContext
+ *
+ * DESCRIPTION : TestContext destructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+TestContext::~TestContext()
+{
+ delete mInterpreter;
+
+ for (size_t j = 0; j < mAvailableCameras.size(); j++) {
+ camera[j] = mAvailableCameras.itemAt(j);
+ camera[j]->closeCamera();
+ camera[j].clear();
+ }
+
+ mAvailableCameras.clear();
+}
+
+/*===========================================================================
+ * FUNCTION : GetCamerasNum
+ *
+ * DESCRIPTION : Get the number of available cameras
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : Number of cameras
+ *==========================================================================*/
+size_t TestContext::GetCamerasNum()
+{
+ return mAvailableCameras.size();
+}
+
+/*===========================================================================
+ * FUNCTION : AddScriptFromFile
+ *
+ * DESCRIPTION : Add script from file
+ *
+ * PARAMETERS :
+ * @scriptFile : Script file
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- success
+ * non-zero failure code
+ *==========================================================================*/
+status_t TestContext::AddScriptFromFile(const char *scriptFile)
+{
+ mInterpreter = new Interpreter(scriptFile);
+ mInterpreter->setTestCtxInst(this);
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : releasePiPBuff
+ *
+ * DESCRIPTION : Release video in video temp buffer
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : None
+ *==========================================================================*/
+void Interpreter::releasePiPBuff() {
+ free(mTestContext->mViVBuff.buff);
+ mTestContext->mViVBuff.buff = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : FunctionalTest
+ *
+ * DESCRIPTION: queries and executes client supplied commands for testing a
+ * particular camera.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN : status_t type of status
+ * NO_ERROR -- continue testing
+ * non-zero -- quit test
+ *==========================================================================*/
+status_t TestContext::FunctionalTest()
+{
+ status_t stat = NO_ERROR;
+ const char *ZSLStr = NULL;
+ size_t ZSLStrSize = 0;
+
+ assert(mAvailableCameras.size());
+
+ if ( !mInterpreter ) {
+ mInterpreter = new Interpreter();
+ mInterpreter->setTestCtxInst(this);
+ }
+
+ if (mAvailableCameras.size() == 0) {
+ printf("no cameras supported... exiting test app\n");
+ } else {
+ mTestRunning = true;
+ }
+
+ while (mTestRunning) {
+ sp<CameraContext> currentCamera =
+ mAvailableCameras.itemAt(mCurrentCameraIndex);
+ Interpreter::Command command =
+ mInterpreter->getCommand(currentCamera);
+ currentCamera->enablePrintPreview();
+
+ switch (command.cmd) {
+ case Interpreter::SWITCH_CAMERA_CMD:
+ {
+ mCurrentCameraIndex++;
+ mCurrentCameraIndex %= mAvailableCameras.size();
+ currentCamera = mAvailableCameras.itemAt(mCurrentCameraIndex);
+ stat = currentCamera->openCamera();
+ }
+ break;
+
+ case Interpreter::RESUME_PREVIEW_CMD:
+ {
+ stat = currentCamera->resumePreview();
+ }
+ break;
+
+ case Interpreter::START_PREVIEW_CMD:
+ {
+ stat = currentCamera->startPreview();
+ }
+ break;
+
+ case Interpreter::STOP_PREVIEW_CMD:
+ {
+ stat = currentCamera->stopPreview();
+ }
+ break;
+
+ case Interpreter::CHANGE_VIDEO_SIZE_CMD:
+ {
+ if ( command.arg )
+ stat = currentCamera->setVideoSize(command.arg);
+ else
+ stat = currentCamera->nextVideoSize();
+ }
+ break;
+
+ case Interpreter::CHANGE_PREVIEW_SIZE_CMD:
+ {
+ if ( command.arg )
+ stat = currentCamera->setPreviewSize(command.arg);
+ else
+ stat = currentCamera->nextPreviewSize();
+ }
+ break;
+
+ case Interpreter::CHANGE_PICTURE_SIZE_CMD:
+ {
+ if ( command.arg )
+ stat = currentCamera->setPictureSize(command.arg);
+ else
+ stat = currentCamera->nextPictureSize();
+ }
+ break;
+
+ case Interpreter::DUMP_CAPS_CMD:
+ {
+ currentCamera->printSupportedParams();
+ }
+ break;
+
+ case Interpreter::AUTOFOCUS_CMD:
+ {
+ stat = currentCamera->autoFocus();
+ }
+ break;
+
+ case Interpreter::TAKEPICTURE_CMD:
+ {
+ stat = currentCamera->takePicture();
+ }
+ break;
+
+ case Interpreter::TAKEPICTURE_IN_PICTURE_CMD:
+ {
+ if (mAvailableCameras.size() == 2) {
+ mSaveCurrentCameraIndex = mCurrentCameraIndex;
+ for (size_t i = 0; i < mAvailableCameras.size(); i++) {
+ mCurrentCameraIndex = i;
+ currentCamera = mAvailableCameras.itemAt(mCurrentCameraIndex);
+ currentCamera->enablePiPCapture();
+ stat = currentCamera->takePicture();
+ }
+ mCurrentCameraIndex = mSaveCurrentCameraIndex;
+ } else {
+ printf("Number of available sensors should be 2\n");
+ }
+ }
+ break;
+
+ case Interpreter::ENABLE_PRV_CALLBACKS_CMD:
+ {
+ stat = currentCamera->enablePreviewCallbacks();
+ }
+ break;
+
+ case Interpreter::START_RECORD_CMD:
+ {
+ stat = currentCamera->stopPreview();
+ stat = currentCamera->configureRecorder();
+ stat = currentCamera->startPreview();
+ stat = currentCamera->startRecording();
+ }
+ break;
+
+ case Interpreter::STOP_RECORD_CMD:
+ {
+ stat = currentCamera->stopRecording();
+
+ stat = currentCamera->stopPreview();
+ stat = currentCamera->unconfigureRecorder();
+ stat = currentCamera->startPreview();
+ }
+ break;
+
+ case Interpreter::START_VIV_RECORD_CMD:
+ {
+
+ if (mAvailableCameras.size() == 2) {
+ mSaveCurrentCameraIndex = mCurrentCameraIndex;
+ stat = mInterpreter->configureViVCodec();
+ for ( size_t i = 0; i < mAvailableCameras.size(); i++ ) {
+ mCurrentCameraIndex = i;
+ currentCamera = mAvailableCameras.itemAt(
+ mCurrentCameraIndex);
+ stat = currentCamera->stopPreview();
+ stat = currentCamera->configureViVRecording();
+ stat = currentCamera->startPreview();
+ stat = currentCamera->startViVRecording();
+ }
+ mCurrentCameraIndex = mSaveCurrentCameraIndex;
+ } else {
+ printf("Number of available sensors should be 2\n");
+ }
+
+ }
+ break;
+
+ case Interpreter::STOP_VIV_RECORD_CMD:
+ {
+ if (mAvailableCameras.size() == 2) {
+ mSaveCurrentCameraIndex = mCurrentCameraIndex;
+ for ( size_t i = 0; i < mAvailableCameras.size(); i++ ) {
+ mCurrentCameraIndex = i;
+ currentCamera = mAvailableCameras.itemAt(
+ mCurrentCameraIndex);
+ stat = currentCamera->stopViVRecording();
+ stat = currentCamera->stopPreview();
+ stat = currentCamera->unconfigureRecorder();
+ stat = currentCamera->startPreview();
+ }
+ stat = mInterpreter->unconfigureViVCodec();
+ mCurrentCameraIndex = mSaveCurrentCameraIndex;
+
+ mInterpreter->releasePiPBuff();
+ } else {
+ printf("Number of available sensors should be 2\n");
+ }
+ }
+ break;
+
+ case Interpreter::EXIT_CMD:
+ {
+ currentCamera->stopPreview();
+ mTestRunning = false;
+ }
+ break;
+
+ case Interpreter::DELAY:
+ {
+ if ( command.arg ) {
+ int delay = atoi(command.arg);
+ if (0 < delay) {
+ usleep(1000U * (unsigned int)delay);
+ }
+ }
+ }
+ break;
+
+ case Interpreter::ZSL_CMD:
+ {
+ currentCamera = mAvailableCameras.itemAt(
+ mCurrentCameraIndex);
+ ZSLStr = currentCamera->getZSL();
+
+ if (NULL != ZSLStr) {
+ ZSLStrSize = strlen(ZSLStr);
+ if (!strncmp(ZSLStr, "off", ZSLStrSize)) {
+ currentCamera->setZSL("on");
+ mIsZSLOn = true;
+ } else if (!strncmp(ZSLStr, "on", ZSLStrSize)) {
+ currentCamera->setZSL("off");
+ mIsZSLOn = false;
+ } else {
+ printf("Set zsl failed!\n");
+ }
+ } else {
+ printf("zsl is NULL\n");
+ }
+ }
+ break;
+
+ default:
+ {
+ currentCamera->disablePrintPreview();
+ }
+ break;
+ }
+ printf("Command status 0x%x \n", stat);
+ }
+
+ return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : PiPLock
+ *
+ * DESCRIPTION: Mutex lock for PiP capture
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void TestContext::PiPLock()
+{
+ Mutex::Autolock l(mPiPLock);
+ while (mPiPinUse) {
+ mPiPCond.wait(mPiPLock);
+ }
+ mPiPinUse = true;
+}
+
+/*===========================================================================
+ * FUNCTION : PiPUnLock
+ *
+ * DESCRIPTION: Mutex unlock for PiP capture
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void TestContext::PiPUnlock()
+{
+ Mutex::Autolock l(mPiPLock);
+ mPiPinUse = false;
+ mPiPCond.signal();
+}
+
+/*===========================================================================
+ * FUNCTION : ViVLock
+ *
+ * DESCRIPTION: Mutex lock for ViV Video
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void TestContext::ViVLock()
+{
+ Mutex::Autolock l(mViVLock);
+ while (mViVinUse) {
+ mViVCond.wait(mViVLock);
+ }
+ mViVinUse = true;
+}
+
+/*===========================================================================
+ * FUNCTION : ViVUnlock
+ *
+ * DESCRIPTION: Mutex unlock for ViV Video
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void TestContext::ViVUnlock()
+{
+ Mutex::Autolock l(mViVLock);
+ mViVinUse = false;
+ mViVCond.signal();
+}
+
+/*===========================================================================
+ * FUNCTION : setViVSize
+ *
+ * DESCRIPTION : Set video in video size
+ *
+ * PARAMETERS :
+ * @VideoSize : video size
+ * @camIndex : camera index
+ *
+ * RETURN : none
+ *==========================================================================*/
+void TestContext::setViVSize(Size VideoSize, int camIndex)
+{
+ mViVVid.VideoSizes[camIndex] = VideoSize;
+}
+
+/*===========================================================================
+ * FUNCTION : main
+ *
+ * DESCRIPTION : main function
+ *
+ * PARAMETERS :
+ * @argc : argc
+ * @argv : argv
+ *
+ * RETURN : int status
+ *==========================================================================*/
+int main(int argc, char *argv[])
+{
+ TestContext ctx;
+
+ if (argc > 1) {
+ if ( ctx.AddScriptFromFile((const char *)argv[1]) ) {
+ printf("Could not add script file... "
+ "continuing in normal menu mode! \n");
+ }
+ }
+
+ ctx.FunctionalTest();
+
+ return 0;
+}
diff --git a/camera/QCamera2/HAL/test/qcamera_test.h b/camera/QCamera2/HAL/test/qcamera_test.h
new file mode 100644
index 0000000..b8c5998
--- /dev/null
+++ b/camera/QCamera2/HAL/test/qcamera_test.h
@@ -0,0 +1,361 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef QCAMERA_TEST_H
+#define QCAMERA_TEST_H
+
+#include <SkData.h>
+#include <SkBitmap.h>
+#include <SkStream.h>
+
+namespace qcamera {
+
+using namespace android;
+
+#define MAX_CAM_INSTANCES 3
+
+class TestContext;
+
+class CameraContext : public CameraListener,
+ public ICameraRecordingProxyListener{
+public:
+ typedef enum {
+ READ_METADATA = 1,
+ READ_IMAGE = 2,
+ READ_ALL = 3
+ } ReadMode_t;
+
+ // This structure is used to store jpeg file sections in memory.
+ typedef struct {
+ unsigned char * Data;
+ int Type;
+ size_t Size;
+ } Sections_t;
+
+public:
+ static const char KEY_ZSL[];
+
+ CameraContext(int cameraIndex);
+ virtual ~CameraContext();
+
+
+
+ status_t openCamera();
+ status_t closeCamera();
+
+ status_t startPreview();
+ status_t stopPreview();
+ status_t resumePreview();
+ status_t autoFocus();
+ status_t enablePreviewCallbacks();
+ status_t takePicture();
+ status_t startRecording();
+ status_t stopRecording();
+ status_t startViVRecording();
+ status_t stopViVRecording();
+ status_t configureViVRecording();
+
+ status_t nextPreviewSize();
+ status_t setPreviewSize(const char *format);
+ status_t getCurrentPreviewSize(Size &previewSize);
+
+ status_t nextPictureSize();
+ status_t getCurrentPictureSize(Size &pictureSize);
+ status_t setPictureSize(const char *format);
+
+ status_t nextVideoSize();
+ status_t setVideoSize(const char *format);
+ status_t getCurrentVideoSize(Size &videoSize);
+ status_t configureRecorder();
+ status_t unconfigureRecorder();
+ Sections_t *FindSection(int SectionType);
+ status_t ReadSectionsFromBuffer (unsigned char *buffer,
+ size_t buffer_size, ReadMode_t ReadMode);
+ virtual IBinder* onAsBinder();
+ void setTestCtxInstance(TestContext *instance);
+
+ void printMenu(sp<CameraContext> currentCamera);
+ void printSupportedParams();
+ const char *getZSL();
+ void setZSL(const char *value);
+
+
+ int getCameraIndex() { return mCameraIndex; }
+ int getNumberOfCameras();
+ void enablePrintPreview();
+ void disablePrintPreview();
+ void enablePiPCapture();
+ void disablePiPCapture();
+ void CheckSectionsAllocated();
+ void DiscardData();
+ void DiscardSections();
+ size_t calcBufferSize(int width, int height);
+ size_t calcStride(int width);
+ size_t calcYScanLines(int height);
+ size_t calcUVScanLines(int height);
+
+ virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+ virtual void postData(int32_t msgType,
+ const sp<IMemory>& dataPtr,
+ camera_frame_metadata_t *metadata);
+
+ virtual void postDataTimestamp(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory>& dataPtr);
+ virtual void dataCallbackTimestamp(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory>& dataPtr);
+
+private:
+
+ status_t createPreviewSurface(int width, int height, int32_t pixFormat);
+ status_t destroyPreviewSurface();
+
+ status_t saveFile(const sp<IMemory>& mem, String8 path);
+ SkBitmap * PiPCopyToOneFile(SkBitmap *bitmap0, SkBitmap *bitmap1);
+ status_t decodeJPEG(const sp<IMemory>& mem, SkBitmap *skBM);
+ status_t encodeJPEG(SkWStream * stream, const SkBitmap *bitmap,
+ String8 path);
+ void previewCallback(const sp<IMemory>& mem);
+
+ static int JpegIdx;
+ int mCameraIndex;
+ bool mResizePreview;
+ bool mHardwareActive;
+ bool mPreviewRunning;
+ bool mRecordRunning;
+ int mVideoFd;
+ int mVideoIdx;
+ bool mRecordingHint;
+ bool mDoPrintMenu;
+ bool mPiPCapture;
+ static int mPiPIdx;
+ unsigned int mfmtMultiplier;
+ int mWidthTmp;
+ int mHeightTmp;
+ size_t mSectionsRead;
+ size_t mSectionsAllocated;
+ Sections_t * mSections;
+ Sections_t * mJEXIFTmp;
+ Sections_t mJEXIFSection;
+ int mHaveAll;
+ TestContext *mInterpr;
+
+ sp<Camera> mCamera;
+ sp<SurfaceComposerClient> mClient;
+ sp<SurfaceControl> mSurfaceControl;
+ sp<Surface> mPreviewSurface;
+ sp<MediaRecorder> mRecorder;
+ CameraParameters mParams;
+ SkBitmap *skBMDec;
+ SkImageEncoder* skJpegEnc;
+ SkBitmap skBMtmp;
+ sp<IMemory> PiPPtrTmp;
+
+ size_t mCurrentPreviewSizeIdx;
+ Size getPreviewSizeFromVideoSizes(Size currentVideoSize);
+ size_t mCurrentPictureSizeIdx;
+ size_t mCurrentVideoSizeIdx;
+ Vector<Size> mSupportedPreviewSizes;
+ Vector<Size> mSupportedPictureSizes;
+ Vector<Size> mSupportedVideoSizes;
+
+ bool mInUse;
+ Mutex mLock;
+ Condition mCond;
+
+ void useLock();
+ void signalFinished();
+
+ //------------------------------------------------------------------------
+ // JPEG markers consist of one or more 0xFF bytes, followed by a marker
+ // code byte (which is not an FF). Here are the marker codes of interest
+ // in this program. (See jdmarker.c for a more complete list.)
+ //------------------------------------------------------------------------
+ #define M_SOF0 0xC0 // Start Of Frame N
+ #define M_SOF1 0xC1 // N indicates which compression process
+ #define M_SOF2 0xC2 // Only SOF0-SOF2 are now in common use
+ #define M_SOF3 0xC3
+ #define M_SOF5 0xC5 // NB: codes C4 and CC are NOT SOF markers
+ #define M_SOF6 0xC6
+ #define M_SOF7 0xC7
+ #define M_SOF9 0xC9
+ #define M_SOF10 0xCA
+ #define M_SOF11 0xCB
+ #define M_SOF13 0xCD
+ #define M_SOF14 0xCE
+ #define M_SOF15 0xCF
+ #define M_SOI 0xD8 // Start Of Image (beginning of datastream)
+ #define M_EOI 0xD9 // End Of Image (end of datastream)
+ #define M_SOS 0xDA // Start Of Scan (begins compressed data)
+ #define M_JFIF 0xE0 // Jfif marker
+ #define M_EXIF 0xE1 // Exif marker. Also used for XMP data!
+ #define M_XMP 0x10E1 // Not a real tag; same value as Exif!
+ #define M_COM 0xFE // COMment
+ #define M_DQT 0xDB
+ #define M_DHT 0xC4
+ #define M_DRI 0xDD
+ #define M_IPTC 0xED // IPTC marker
+ #define PSEUDO_IMAGE_MARKER 0x123 // Extra value.
+};
+
+class Interpreter
+{
+public:
+ enum Commands_e {
+ SWITCH_CAMERA_CMD = 'A',
+ RESUME_PREVIEW_CMD = '[',
+ START_PREVIEW_CMD = '1',
+ STOP_PREVIEW_CMD = '2',
+ CHANGE_VIDEO_SIZE_CMD = '3',
+ CHANGE_PREVIEW_SIZE_CMD = '4',
+ CHANGE_PICTURE_SIZE_CMD = '5',
+ START_RECORD_CMD = '6',
+ STOP_RECORD_CMD = '7',
+ START_VIV_RECORD_CMD = '8',
+ STOP_VIV_RECORD_CMD = '9',
+ DUMP_CAPS_CMD = 'E',
+ AUTOFOCUS_CMD = 'f',
+ TAKEPICTURE_CMD = 'p',
+ TAKEPICTURE_IN_PICTURE_CMD = 'P',
+ ENABLE_PRV_CALLBACKS_CMD = '&',
+ EXIT_CMD = 'q',
+ DELAY = 'd',
+ ZSL_CMD = 'z',
+ INVALID_CMD = '0'
+ };
+
+ struct Command {
+ Command( Commands_e cmd_, char *arg_ = NULL)
+ : cmd(cmd_)
+ , arg(arg_) {}
+ Command()
+ : cmd(INVALID_CMD)
+ , arg(NULL) {}
+ Commands_e cmd;
+ char *arg;
+ };
+
+ /* API */
+ Interpreter()
+ : mUseScript(false)
+ , mScript(NULL) {}
+
+ Interpreter(const char *file);
+ ~Interpreter();
+
+ Command getCommand(sp<CameraContext> currentCamera);
+ void releasePiPBuff();
+ status_t configureViVCodec();
+ void setViVSize(Size VideoSize, int camIndex);
+ void setTestCtxInst(TestContext *instance);
+ status_t unconfigureViVCodec();
+ status_t ViVEncoderThread();
+ void ViVEncode();
+ static void *ThreadWrapper(void *context);
+
+private:
+ static const int numberOfCommands;
+
+ bool mUseScript;
+ size_t mCmdIndex;
+ char *mScript;
+ Vector<Command> mCommands;
+ TestContext *mTestContext;
+ pthread_t mViVEncThread;
+};
+
+class TestContext
+{
+ friend class CameraContext;
+ friend class Interpreter;
+public:
+ TestContext();
+ ~TestContext();
+
+ size_t GetCamerasNum();
+ status_t FunctionalTest();
+ status_t AddScriptFromFile(const char *scriptFile);
+ void setViVSize(Size VideoSize, int camIndex);
+ void PiPLock();
+ void PiPUnlock();
+ void ViVLock();
+ void ViVUnlock();
+
+private:
+ sp<CameraContext> camera[MAX_CAM_INSTANCES];
+ char GetNextCmd(sp<qcamera::CameraContext> currentCamera);
+ size_t mCurrentCameraIndex;
+ size_t mSaveCurrentCameraIndex;
+ Vector< sp<qcamera::CameraContext> > mAvailableCameras;
+ bool mTestRunning;
+ Interpreter *mInterpreter;
+ Mutex mPiPLock;
+ Condition mPiPCond;
+ bool mPiPinUse;
+ Mutex mViVLock;
+ Condition mViVCond;
+ bool mViVinUse;
+ bool mIsZSLOn;
+
+ typedef struct ViVBuff_t{
+ void *buff;
+ size_t buffSize;
+ size_t YStride;
+ size_t UVStride;
+ size_t YScanLines;
+ size_t UVScanLines;
+ size_t srcWidth;
+ size_t srcHeight;
+ } ViVBuff_t;
+
+ typedef struct ViVVid_t{
+ sp<IGraphicBufferProducer> bufferProducer;
+ sp<Surface> surface;
+ sp<MediaCodec> codec;
+ sp<MediaMuxer> muxer;
+ sp<ANativeWindow> ANW;
+ Vector<sp<ABuffer> > buffers;
+ Size VideoSizes[2];
+ int ViVIdx;
+ size_t buff_cnt;
+ sp<GraphicBuffer> graphBuf;
+ void * mappedBuff;
+ bool isBuffValid;
+ int sourceCameraID;
+ int destinationCameraID;
+ } vidPiP_t;
+
+ ViVVid_t mViVVid;
+ ViVBuff_t mViVBuff;
+};
+
+}; //namespace qcamera
+
+#endif
diff --git a/camera/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h b/camera/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h
new file mode 100644
index 0000000..5197447
--- /dev/null
+++ b/camera/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_DETECTFACE_ENGINE_H__
+#define __TS_DETECTFACE_ENGINE_H__
+#include "ts_makeup_data.h"
+#include "ts_makeup_image.h"
+
+ typedef void* TSHandle;
+
+ /*===========================================================================
+ * FUNCTION : ts_detectface_create_context
+ *
+ * DESCRIPTION: Creates the detect-face context. This method MUST be called first.
+ *
+ *
+ * RETURN : TSHandle as the context handle
+ *
+ *==========================================================================*/
+ TSHandle ts_detectface_create_context();
+
+
+ /*===========================================================================
+ * FUNCTION : ts_detectface_destroy_context
+ *
+ * DESCRIPTION: Destroys the context. This method MUST be called last,
+ * with a context handle previously obtained from
+ * ts_detectface_create_context.
+ *
+ * PARAMETERS :
+ * @param[in] contexTSHandle : The context handle pointer.
+ *
+ *
+ *==========================================================================*/
+ void ts_detectface_destroy_context(TSHandle* contexTSHandle);
+
+
+ /*===========================================================================
+ * FUNCTION : ts_detectface_detect
+ *
+ * DESCRIPTION: Starts face detection. ts_detectface_create_context MUST be
+ * called first to create the context and obtain its handle.
+ *
+ * PARAMETERS :
+ * @param[in] contexTSHandle : The context handle.
+ * @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+ *
+ * RETURN : int. Negative on failure, otherwise the number of detected faces.
+ *
+ *==========================================================================*/
+ int ts_detectface_detect(TSHandle contexTSHandle, TSMakeupData *pInData);
+
+ /*===========================================================================
+ * FUNCTION : ts_detectface_detectEx
+ *
+ * DESCRIPTION: Starts face detection. ts_detectface_create_context MUST be
+ * called first to create the context and obtain its handle.
+ *
+ * PARAMETERS :
+ * @param[in] contexTSHandle : The context handle.
+ * @param[in] pInData : The TSMakeupDataEx pointer. MUST not be NULL.
+ *
+ * RETURN : int. Negative on failure, otherwise the number of detected faces.
+ *
+ *==========================================================================*/
+ int ts_detectface_detectEx(TSHandle contexTSHandle, TSMakeupDataEx *pInData);
+ /*===========================================================================
+ * FUNCTION : ts_detectface_get_face_info
+ *
+ * DESCRIPTION: Gets information for a detected face. ts_detectface_detect
+ * MUST be called first to run detection.
+ *
+ * PARAMETERS :
+ * @param[in] contexTSHandle : The context handle.
+ * @param[in] index : The face index. MUST be > 0.
+ * @param[out] pFaceRect : The face rect. MUST not be NULL.
+ * @param[out] leftEye : The left eye rect.
+ * @param[out] rightEye : The right eye rect.
+ * @param[out] pMouth : The mouth rect.
+ *
+ * RETURN : TS_OK if success, otherwise failed.
+ *
+ *==========================================================================*/
+ int ts_detectface_get_face_info(TSHandle contexTSHandle, int index, TSRect *pFaceRect, TSRect *leftEye, TSRect *rightEye, TSRect *pMouth);
+
+#endif // __TS_DETECTFACE_ENGINE_H__
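Read together, the declarations above imply a create / detect / query / destroy
call sequence. The sketch below illustrates that flow under stated assumptions:
the NV21 buffers and frame geometry come from the caller, and the NULL check and
the face index passed to ts_detectface_get_face_info are illustrative only; the
header mandates nothing beyond the ordering rules in its comments.

    #include "ts_detectface_engine.h"

    // Sketch only: frame data, dimensions and the face index are illustrative
    // assumptions, not defined by this header.
    static void detect_faces_sketch(unsigned char *yBuf, unsigned char *uvBuf,
            int width, int height)
    {
        TSHandle ctx = ts_detectface_create_context();   // MUST be called first
        if (ctx == NULL) {
            return;
        }

        TSMakeupData frame;
        frame.frameWidth  = width;   // NV21 frame geometry
        frame.frameHeight = height;
        frame.yBuf  = yBuf;          // NV21 Y plane
        frame.uvBuf = uvBuf;         // NV21 interleaved UV plane

        int numFaces = ts_detectface_detect(ctx, &frame);
        if (numFaces > 0) {
            TSRect face, leftEye, rightEye, mouth;
            // The index convention ("MUST be > 0") follows the comment above.
            if (ts_detectface_get_face_info(ctx, 1, &face, &leftEye,
                    &rightEye, &mouth) == TS_OK) {
                // face / leftEye / rightEye / mouth now hold the detected regions.
            }
        }

        ts_detectface_destroy_context(&ctx);             // MUST be called last
    }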
diff --git a/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h
new file mode 100644
index 0000000..ac43713
--- /dev/null
+++ b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_DATA_H__
+#define __TS_MAKEUP_DATA_H__
+
+ #define TS_OK (0x00000000) //Successful
+ #define TS_ERROR_PARAM (0x00000001) //Parameters error
+ #define TS_ERROR_IO (0x00000002) //Input or output error
+ #define TS_ERROR_INTERNAL (0x00000003) //Internal error
+ #define TS_NO_MEMORY (0x00000004) //No memory error
+
+
+ /*
+ * Data struct : rectangle
+ */
+ typedef struct __tag_tsrect
+ {
+ long left;
+ long top;
+ long right;
+ long bottom;
+ } TSRect;
+
+ /*
+ * Data struct : point
+ */
+ typedef struct __tag_tsmakeuppoint
+ {
+ long x;
+ long y;
+ } TSPoint;
+
+
+#endif // __TS_MAKEUP_DATA_H__
diff --git a/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h
new file mode 100644
index 0000000..375130d
--- /dev/null
+++ b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_ENGINI_H__
+#define __TS_MAKEUP_ENGINI_H__
+#include "ts_makeup_data.h"
+#include "ts_makeup_image.h"
+
+
+ /*
+ * FUNCTION : ts_makeup_get_supported_face_num
+ *
+ * DESCRIPTION: get supported face number
+ *
+ * RETURN : The supported face number
+ *
+ */
+ int ts_makeup_get_supported_face_num();
+
+
+ /*
+ * FUNCTION : ts_makeup_skin_beauty
+ *
+ * DESCRIPTION: skin beauty method.
+ *
+ * PARAMETERS :
+ * @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+ * @param[out] pOutData : The TSMakeupData pointer. MUST not be NULL.
+ * @param[in] pFaceRect : The face rect. MUST not be NULL.
+ * @param[in] cleanLevel : Skin clean level, value range [0,100].
+ * @param[in] whiteLevel : Skin white level, value range [0,100].
+ * RETURN : TS_OK if success, otherwise failed.
+ *
+ */
+ int ts_makeup_skin_beauty(TSMakeupData *pInData, TSMakeupData *pOutData, const TSRect *pFaceRect, int cleanLevel,int whiteLevel);
+ /*
+ * FUNCTION : ts_makeup_skin_beautyEx
+ *
+ * DESCRIPTION: skin beauty method.
+ *
+ * PARAMETERS :
+ * @param[in] pInData : The TSMakeupDataEx pointer. MUST not be NULL.
+ * @param[out] pOutData : The TSMakeupDataEx pointer. MUST not be NULL.
+ * @param[in] pFaceRect : The face rect. MUST not be NULL.
+ * @param[in] cleanLevel : Skin clean level, value range [0,100].
+ * @param[in] whiteLevel : Skin white level, value range [0,100].
+ * RETURN : TS_OK if success, otherwise failed.
+ *
+ */
+ int ts_makeup_skin_beautyEx(TSMakeupDataEx *pInData, TSMakeupDataEx *pOutData, const TSRect *pFaceRect, int cleanLevel, int whiteLevel);
+ /*
+ * FUNCTION : ts_makeup_finish
+ *
+ * DESCRIPTION: Finishes makeup; call this method last.
+ * It MUST be called after ts_makeup_skin_clean and ts_makeup_skin_whiten.
+ *
+ */
+ void ts_makeup_finish();
+
+
+ /*
+ * FUNCTION : ts_makeup_warp_face
+ *
+ * DESCRIPTION: Warps the face.
+ *
+ * PARAMETERS :
+ * @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+ * @param[out] pOutData : The TSMakeupData pointer. MUST not be NULL.
+ * @param[in] pLeftEye : The left eye rect pointer. MUST not be NULL.
+ * @param[in] pRightEye : The right eye rect pointer. MUST not be NULL.
+ * @param[in] pMouth : The mouth rect pointer. MUST not be NULL.
+ * @param[in] bigEyeLevel : The big eye level, value range [0,100].
+ * @param[in] trimFaceLevel : The trim face level, value range [0,100].
+ *
+ * RETURN : TS_OK if success, otherwise failed.
+ *
+ */
+ int ts_makeup_warp_face(TSMakeupData *pInData, TSMakeupData *pOutData,
+ const TSRect *pLeftEye, const TSRect *pRightEye, const TSRect *pMouth, int bigEyeLevel, int trimFaceLevel);
+
+#endif // __TS_MAKEUP_ENGINI_H__
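A minimal usage sketch for the skin-beauty path follows. It assumes the
TSMakeupData structures are already populated (see ts_makeup_image.h below) and
that the face rectangle comes from ts_detectface_get_face_info; the level values
are arbitrary examples within the documented [0,100] range.

    #include "ts_makeup_engine.h"

    // Sketch only: inFrame/outFrame and faceRect are assumed to be valid,
    // caller-provided inputs.
    static int beautify_sketch(TSMakeupData *inFrame, TSMakeupData *outFrame,
            const TSRect *faceRect)
    {
        const int cleanLevel = 50;   // skin clean level, range [0,100]
        const int whiteLevel = 30;   // skin white level, range [0,100]

        int rc = ts_makeup_skin_beauty(inFrame, outFrame, faceRect,
                cleanLevel, whiteLevel);

        // The header above requires ts_makeup_finish() once makeup processing
        // is complete.
        ts_makeup_finish();

        return rc;   // TS_OK on success
    }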
diff --git a/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h
new file mode 100644
index 0000000..5621d3f
--- /dev/null
+++ b/camera/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_IMAGE_H__
+#define __TS_MAKEUP_IMAGE_H__
+
+ /*
+ * Data struct : TSMakeupData
+ */
+ typedef struct __tag_tsmakeupdata
+ {
+ int frameWidth; // NV21 frame width. MUST be > 0.
+ int frameHeight; // NV21 frame height. MUST be > 0.
+ unsigned char *yBuf; // NV21 Y buffer pointer. MUST not be NULL.
+ unsigned char *uvBuf; // NV21 UV buffer pointer. MUST not be NULL.
+ }TSMakeupData;
+
+ /*
+ * Data struct : TSMakeupDataEx
+ */
+ typedef struct __tag_tsmakeupdataEx
+ {
+ int frameWidth; // NV21 frame width. MUST be > 0.
+ int frameHeight; // NV21 frame height. MUST be > 0.
+ unsigned char *yBuf; // NV21 Y buffer pointer. MUST not be NULL.
+ unsigned char *uvBuf; // NV21 UV buffer pointer. MUST not be NULL.
+ int yStride; // NV21 Y buffer stride length.
+ int uvStride; // NV21 UV buffer stride length.
+ }TSMakeupDataEx;
+
+
+#endif // __TS_MAKEUP_IMAGE_H__
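
As a small, hedged illustration of how the extended struct above might be populated for a strided NV21 frame; the buffer pointers and the 1920x1080 geometry are placeholders, not values taken from this HAL.

    // Illustrative only: tightly packed 1920x1080 NV21 frame is an assumption.
    #include "ts_makeup_image.h"

    static TSMakeupDataEx make_frame(unsigned char *yPlane, unsigned char *uvPlane)
    {
        TSMakeupDataEx frame;
        frame.frameWidth  = 1920;     // MUST be > 0
        frame.frameHeight = 1080;     // MUST be > 0
        frame.yBuf        = yPlane;   // NV21 Y plane, must not be NULL
        frame.uvBuf       = uvPlane;  // NV21 interleaved V/U plane, must not be NULL
        frame.yStride     = 1920;     // Y row stride for a tightly packed frame
        frame.uvStride    = 1920;     // V/U row stride (interleaved plane, same width)
        return frame;
    }
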
diff --git a/camera/QCamera2/HAL/wrapper/QualcommCamera.cpp b/camera/QCamera2/HAL/wrapper/QualcommCamera.cpp
new file mode 100644
index 0000000..e964cd9
--- /dev/null
+++ b/camera/QCamera2/HAL/wrapper/QualcommCamera.cpp
@@ -0,0 +1,450 @@
+/* Copyright (c) 2011-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+
+// System dependencies
+#include <utils/threads.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/RefBase.h>
+
+extern "C" {
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+}
+
+// Camera dependencies
+#include "QualcommCamera.h"
+#include "QCamera2Factory.h"
+#include "QCamera2HWI.h"
+
+/* HAL function implementations go here. */
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+
+static hw_module_methods_t camera_module_methods = {
+ open: camera_device_open,
+};
+
+static hw_module_t camera_common = {
+ tag: HARDWARE_MODULE_TAG,
+ module_api_version: CAMERA_MODULE_API_VERSION_1_0,
+ hal_api_version: HARDWARE_HAL_API_VERSION,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "QCamera Module",
+ author: "Quic on behalf of CAF",
+ methods: &camera_module_methods,
+ dso: NULL,
+ reserved: {0},
+};
+
+using namespace qcamera;
+namespace android {
+
+typedef struct {
+ camera_device hw_dev;
+ QCamera2HardwareInterface *hardware;
+ int camera_released;
+ int cameraId;
+} camera_hardware_t;
+
+typedef struct {
+ camera_memory_t mem;
+ int32_t msgType;
+ sp<IMemory> dataPtr;
+ void* user;
+ unsigned int index;
+} q_cam_memory_t;
+
+QCamera2HardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+ QCamera2HardwareInterface *hardware = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware = camHal->hardware;
+ }
+ return hardware;
+}
+
+extern "C" int get_number_of_cameras()
+{
+ /* try to query every time we get the call!*/
+
+ ALOGE("Q%s: E");
+ return QCamera2Factory::get_number_of_cameras();
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rc = -1;
+ ALOGE("Q%s: E");
+
+ if(info) {
+ rc = QCamera2Factory::get_camera_info(camera_id, info);
+ }
+ LOGD("Q%s: X");
+ return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int camera_device_open(
+ const struct hw_module_t* module, const char* id,
+ struct hw_device_t** hw_device)
+{
+ int rc = -1;
+ camera_device *device = NULL;
+
+ if(module && id && hw_device) {
+ if (!strcmp(module->name, camera_common.name)) {
+ int cameraId = atoi(id);
+
+ camera_hardware_t *camHal =
+ (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+ if(!camHal) {
+ *hw_device = NULL;
+ ALOGE(" end in no mem");
+ return rc;
+ }
+ /* we have the camera_hardware obj malloced */
+ memset(camHal, 0, sizeof (camera_hardware_t));
+ camHal->hardware = new QCamera2HardwareInterface((uint32_t)cameraId);
+ if (camHal->hardware) {
+ camHal->cameraId = cameraId;
+ device = &camHal->hw_dev;
+ device->common.close = close_camera_device;
+ device->ops = &QCamera2HardwareInterface::mCameraOps;
+ device->priv = (void *)camHal;
+ rc = 0;
+ } else {
+ if (camHal->hardware) {
+ delete camHal->hardware;
+ camHal->hardware = NULL;
+ }
+ free(camHal);
+ device = NULL;
+ goto EXIT;
+ }
+ }
+ }
+ /* Pass the actual hw_device ptr to the framework so memberof() can be used later. */
+ if (hw_device) *hw_device = device ? (hw_device_t *)&device->common : NULL;
+
+EXIT:
+
+ ALOGE(" end rc %d", rc);
+ return rc;
+}
+
+extern "C" int close_camera_device( hw_device_t *hw_dev)
+{
+ ALOGE("Q%s: device =%p E", hw_dev);
+ int rc = -1;
+ camera_device_t *device = (camera_device_t *)hw_dev;
+
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal ) {
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj( device);
+ if(!camHal->camera_released) {
+ if(hardware != NULL) {
+ hardware->release(device);
+ }
+ }
+ if(hardware != NULL)
+ delete hardware;
+ free(camHal);
+ }
+ rc = 0;
+ }
+ return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+
+ if(hardware != NULL) {
+ rc = hardware->set_preview_window(device, window);
+ }
+ return rc;
+}
+
+void set_CallBacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->set_CallBacks(device, notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+ }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->enable_msg_type(device, msg_type);
+ }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ ALOGE("Q%s: E");
+ if(hardware != NULL){
+ hardware->disable_msg_type(device, msg_type);
+ }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->msg_type_enabled(device, msg_type);
+ }
+ return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->start_preview(device);
+ }
+ ALOGE("Q%s: X");
+ return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stop_preview(device);
+ }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->preview_enabled(device);
+ }
+ return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->store_meta_data_in_buffers(device, enable);
+ }
+ return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->start_recording(device);
+ }
+ return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stop_recording(device);
+ }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->recording_enabled(device);
+ }
+ return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ LOGD("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->release_recording_frame(device, opaque);
+ }
+}
+
+int auto_focus(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->auto_focus(device);
+ }
+ return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancel_auto_focus(device);
+ }
+ return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->take_picture(device);
+ }
+ return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancel_picture(device);
+ }
+ return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL && parms){
+ rc = hardware->set_parameters(device, parms);
+ }
+ return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ char *parms = NULL;
+ parms = hardware->get_parameters(device);
+ return parms;
+ }
+ return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->put_parameters(device, parm);
+ }
+}
+
+int send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->send_command(device, cmd, arg1, arg2);
+ }
+ return rc;
+}
+
+void release(struct camera_device * device)
+{
+ ALOGE("Q%s: E");
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware->release(device);
+ camHal->camera_released = true;
+ }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+ ALOGE("Q%s: E");
+ int rc = -1;
+ QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->dump(device, fd);
+ }
+ return rc;
+}
+
+}; // namespace android
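
For context, here is a hedged sketch of the path a client takes to reach camera_device_open() above, via the standard libhardware module lookup. Error handling is trimmed, the camera id string "0" is just an example, and open_first_camera() is a hypothetical helper, not code from this commit.

    // Sketch under the usual camera HAL v1 conventions; not part of this HAL.
    #include <hardware/hardware.h>
    #include <hardware/camera.h>

    int open_first_camera(camera_device_t **out)
    {
        const hw_module_t *module = NULL;
        int rc = hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module);
        if (rc != 0 || module == NULL)
            return rc;

        hw_device_t *device = NULL;
        rc = module->methods->open(module, "0", &device);  // lands in camera_device_open()
        if (rc != 0 || device == NULL)
            return rc != 0 ? rc : -1;

        // camera_device_t embeds hw_device_t as its first member, so this cast is safe.
        *out = reinterpret_cast<camera_device_t *>(device);
        // (*out)->ops now points at QCamera2HardwareInterface::mCameraOps.
        return 0;
    }
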
diff --git a/camera/QCamera2/HAL/wrapper/QualcommCamera.h b/camera/QCamera2/HAL/wrapper/QualcommCamera.h
new file mode 100644
index 0000000..6caa3ca
--- /dev/null
+++ b/camera/QCamera2/HAL/wrapper/QualcommCamera.h
@@ -0,0 +1,107 @@
+/* Copyright (c) 2011-2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+
+ int get_number_of_cameras();
+ int get_camera_info(int camera_id, struct camera_info *info);
+
+ int camera_device_open(const struct hw_module_t* module, const char* id,
+ struct hw_device_t** device);
+
+ hw_device_t * open_camera_device(int cameraId);
+
+ int close_camera_device( hw_device_t *);
+
+namespace android {
+ int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ void set_CallBacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+ void disable_msg_type(struct camera_device *, int32_t msg_type);
+ int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+ int start_preview(struct camera_device *);
+
+ void stop_preview(struct camera_device *);
+
+ int preview_enabled(struct camera_device *);
+ int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+ int start_recording(struct camera_device *);
+
+ void stop_recording(struct camera_device *);
+
+ int recording_enabled(struct camera_device *);
+
+ void release_recording_frame(struct camera_device *,
+ const void *opaque);
+
+ int auto_focus(struct camera_device *);
+
+ int cancel_auto_focus(struct camera_device *);
+
+ int take_picture(struct camera_device *);
+
+ int cancel_picture(struct camera_device *);
+
+ int set_parameters(struct camera_device *, const char *parms);
+
+ char* get_parameters(struct camera_device *);
+
+ void put_parameters(struct camera_device *, char *);
+
+ int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+
+ void release(struct camera_device *);
+
+ int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
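
Finally, a hedged example of how the C entry points declared in this header could be exercised to enumerate cameras. The zero-on-success convention and the camera_info fields come from hardware/camera_common.h, not from this wrapper, and list_cameras() is an illustrative helper.

    // Enumeration sketch; assumes get_camera_info() returns 0 on success.
    #include <string.h>
    #include <hardware/camera_common.h>
    #include "QualcommCamera.h"

    void list_cameras()
    {
        int n = get_number_of_cameras();
        for (int i = 0; i < n; ++i) {
            struct camera_info info;
            memset(&info, 0, sizeof(info));
            if (get_camera_info(i, &info) == 0) {
                // info.facing is CAMERA_FACING_BACK or CAMERA_FACING_FRONT;
                // info.orientation is the sensor mounting rotation in degrees.
            }
        }
    }
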