path: root/camera/QCamera2/stack/mm-camera-interface/src
Diffstat (limited to 'camera/QCamera2/stack/mm-camera-interface/src')
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c            2397
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c    3639
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c  2052
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c        294
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c     4581
-rw-r--r--  camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c      698
6 files changed, 13661 insertions, 0 deletions
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..a559815
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,2397 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
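+// The build is expected to define SYSTEM_HEADER_PREFIX (e.g. via a -D flag)
+// so that the platform-specific ioctl.h below is included.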
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
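+/* parm_arr is treated as an array of 32-bit words forming one large bitmask;
+ * these macros set/test the bit that corresponds to a given parm index. */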
+#define SET_PARM_BIT32(parm, parm_arr) \
+ (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+ ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
+/* internal function declare */
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+ uint8_t reg_flag);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+ mm_camera_event_t *event);
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_channel_by_handler
+ *
+ * DESCRIPTION: utility function to get a channel object from its handle
+ *
+ * PARAMETERS :
+ * @cam_obj: ptr to a camera object
+ * @handler: channel handle
+ *
+ * RETURN : ptr to a channel object.
+ * NULL if failed.
+ *==========================================================================*/
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+ mm_camera_obj_t * cam_obj,
+ uint32_t handler)
+{
+ int i;
+ mm_channel_t *ch_obj = NULL;
+ for(i = 0; i < MM_CAMERA_CHANNEL_MAX; i++) {
+ if (handler == cam_obj->ch[i].my_hdl) {
+ ch_obj = &cam_obj->ch[i];
+ break;
+ }
+ }
+ return ch_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_chip_is_a_family
+ *
+ * DESCRIPTION: utility function to check if the host is an A-family chip
+ *
+ * PARAMETERS :
+ *
+ * RETURN : TRUE if A family.
+ * FALSE otherwise.
+ *==========================================================================*/
+uint8_t mm_camera_util_chip_is_a_family(void)
+{
+#ifdef USE_A_FAMILY
+ return TRUE;
+#else
+ return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_dispatch_app_event
+ *
+ * DESCRIPTION: dispatch event to apps that registered for event notify
+ *
+ * PARAMETERS :
+ * @cmd_cb: ptr to a struct storing event info
+ * @user_data: user data ptr (camera object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ int i;
+ mm_camera_event_t *event = &cmd_cb->u.evt;
+ mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
+ if (NULL != my_obj) {
+ mm_camera_cmd_thread_name(my_obj->evt_thread.threadName);
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(my_obj->evt.evt[i].evt_cb) {
+ my_obj->evt.evt[i].evt_cb(
+ my_obj->my_hdl,
+ event,
+ my_obj->evt.evt[i].user_data);
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_event_notify
+ *
+ * DESCRIPTION: callback to handle an event notification from the kernel. This
+ *              call dequeues the event from the kernel.
+ *
+ * PARAMETERS :
+ * @user_data: user data ptr (camera object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_event_notify(void* user_data)
+{
+ struct v4l2_event ev;
+ struct msm_v4l2_event_data *msm_evt = NULL;
+ int rc;
+ mm_camera_event_t evt;
+ memset(&evt, 0, sizeof(mm_camera_event_t));
+
+ mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+ if (NULL != my_obj) {
+ /* read evt */
+ memset(&ev, 0, sizeof(ev));
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+
+ if (rc >= 0 && ev.id == MSM_CAMERA_MSM_NOTIFY) {
+ msm_evt = (struct msm_v4l2_event_data *)ev.u.data;
+ switch (msm_evt->command) {
+ case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
+ evt.server_event_type = CAM_EVENT_TYPE_DAEMON_PULL_REQ;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ break;
+ case CAM_EVENT_TYPE_MAP_UNMAP_DONE:
+ pthread_mutex_lock(&my_obj->evt_lock);
+ my_obj->evt_rcvd.server_event_type = msm_evt->command;
+ my_obj->evt_rcvd.status = msm_evt->status;
+ pthread_cond_signal(&my_obj->evt_cond);
+ pthread_mutex_unlock(&my_obj->evt_lock);
+ break;
+ case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+ case CAM_EVENT_TYPE_INT_TAKE_RAW:
+ {
+ evt.server_event_type = msm_evt->command;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ case MSM_CAMERA_PRIV_SHUTDOWN:
+ {
+ LOGE("Camera Event DAEMON DIED received");
+ evt.server_event_type = CAM_EVENT_TYPE_DAEMON_DIED;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ case CAM_EVENT_TYPE_CAC_DONE:
+ {
+ evt.server_event_type = CAM_EVENT_TYPE_CAC_DONE;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_enqueue_evt
+ *
+ * DESCRIPTION: enqueue received event into event queue to be processed by
+ * event thread.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @event : event to be queued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+ mm_camera_event_t *event)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t *node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+ node->u.evt = *event;
+
+ /* enqueue to evt cmd thread */
+ cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+ /* wake up evt cmd thread */
+ cam_sem_post(&(my_obj->evt_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_open
+ *
+ * DESCRIPTION: open a camera
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+ char dev_name[MM_CAMERA_DEV_NAME_LEN];
+ int32_t rc = 0;
+ int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+ uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+ int cam_idx = 0;
+ const char *dev_name_value = NULL;
+ int l_errno = 0;
+
+ LOGD("begin\n");
+
+ if (NULL == my_obj) {
+ goto on_error;
+ }
+ dev_name_value = mm_camera_util_get_dev_name(my_obj->my_hdl);
+ if (NULL == dev_name_value) {
+ goto on_error;
+ }
+ snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+ dev_name_value);
+ sscanf(dev_name, "/dev/video%d", &cam_idx);
+ LOGD("dev name = %s, cam_idx = %d", dev_name, cam_idx);
+
+ do{
+ n_try--;
+ errno = 0;
+ my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ l_errno = errno;
+ LOGD("ctrl_fd = %d, errno == %d", my_obj->ctrl_fd, l_errno);
+ if((my_obj->ctrl_fd >= 0) || (errno != EIO && errno != ETIMEDOUT) || (n_try <= 0 )) {
+ LOGH("opened, break out while loop");
+ break;
+ }
+ LOGE("Failed with %s error, retrying after %d milli-seconds",
+ strerror(errno), sleep_msec);
+ usleep(sleep_msec * 1000U);
+ }while (n_try > 0);
+
+ if (my_obj->ctrl_fd < 0) {
+ LOGE("cannot open control fd of '%s' (%s)\n",
+ dev_name, strerror(l_errno));
+ if (l_errno == EBUSY)
+ rc = -EUSERS;
+ else
+ rc = -1;
+ goto on_error;
+ }
+
+ /* open domain socket*/
+ n_try = MM_CAMERA_DEV_OPEN_TRIES;
+ do {
+ n_try--;
+ my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+ l_errno = errno;
+ LOGD("ds_fd = %d, errno = %d", my_obj->ds_fd, l_errno);
+ if((my_obj->ds_fd >= 0) || (n_try <= 0 )) {
+ LOGD("opened, break out while loop");
+ break;
+ }
+ LOGD("failed with I/O error retrying after %d milli-seconds",
+ sleep_msec);
+ usleep(sleep_msec * 1000U);
+ } while (n_try > 0);
+
+ if (my_obj->ds_fd < 0) {
+ LOGE("cannot open domain socket fd of '%s'(%s)\n",
+ dev_name, strerror(l_errno));
+ rc = -1;
+ goto on_error;
+ }
+ pthread_mutex_init(&my_obj->msg_lock, NULL);
+
+ pthread_mutex_init(&my_obj->cb_lock, NULL);
+ pthread_mutex_init(&my_obj->evt_lock, NULL);
+ pthread_cond_init(&my_obj->evt_cond, NULL);
+
+ LOGD("Launch evt Thread in Cam Open");
+ snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
+ mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+ mm_camera_dispatch_app_event,
+ (void *)my_obj);
+
+ /* launch event poll thread
+ * we will add evt fd into event poll thread upon user first register for evt */
+ LOGD("Launch evt Poll Thread in Cam Open");
+ snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
+ mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+ MM_CAMERA_POLL_TYPE_EVT);
+ mm_camera_evt_sub(my_obj, TRUE);
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * in order not to block operation of the other camera in dual camera use cases.*/
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ LOGD("end (rc = %d)\n", rc);
+ return rc;
+
+on_error:
+
+ if (NULL == dev_name_value) {
+ LOGE("Invalid device name\n");
+ rc = -1;
+ }
+
+ if (NULL == my_obj) {
+ LOGE("Invalid camera object\n");
+ rc = -1;
+ } else {
+ if (my_obj->ctrl_fd >= 0) {
+ close(my_obj->ctrl_fd);
+ my_obj->ctrl_fd = -1;
+ }
+ if (my_obj->ds_fd >= 0) {
+ mm_camera_socket_close(my_obj->ds_fd);
+ my_obj->ds_fd = -1;
+ }
+ }
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * in order not to block operation of the other camera in dual camera use cases.*/
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_close
+ *
+ * DESCRIPTION: close a camera. Unsubscribes from kernel events, releases the
+ *              event threads, and closes the control and domain socket fds.
+ *
+ * PARAMETERS :
+ *   @my_obj : ptr to a camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+ LOGD("unsubscribe evt");
+ mm_camera_evt_sub(my_obj, FALSE);
+
+ LOGD("Close evt Poll Thread in Cam Close");
+ mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+ LOGD("Close evt cmd Thread in Cam Close");
+ mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+ if(my_obj->ctrl_fd >= 0) {
+ close(my_obj->ctrl_fd);
+ my_obj->ctrl_fd = -1;
+ }
+ if(my_obj->ds_fd >= 0) {
+ mm_camera_socket_close(my_obj->ds_fd);
+ my_obj->ds_fd = -1;
+ }
+ pthread_mutex_destroy(&my_obj->msg_lock);
+
+ pthread_mutex_destroy(&my_obj->cb_lock);
+ pthread_mutex_destroy(&my_obj->evt_lock);
+ pthread_cond_destroy(&my_obj->evt_cond);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_register_event_notify_internal
+ *
+ * DESCRIPTION: internal implementation for registering callback for event notify.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @evt_cb : callback to be registered to handle event notify
+ * @user_data: user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int i;
+ int rc = -1;
+ mm_camera_evt_obj_t *evt_array = NULL;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ evt_array = &my_obj->evt;
+ if(evt_cb) {
+ /* this is reg case */
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(evt_array->evt[i].user_data == NULL) {
+ evt_array->evt[i].evt_cb = evt_cb;
+ evt_array->evt[i].user_data = user_data;
+ evt_array->reg_count++;
+ rc = 0;
+ break;
+ }
+ }
+ } else {
+ /* this is unreg case */
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(evt_array->evt[i].user_data == user_data) {
+ evt_array->evt[i].evt_cb = NULL;
+ evt_array->evt[i].user_data = NULL;
+ evt_array->reg_count--;
+ rc = 0;
+ break;
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&my_obj->cb_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_register_event_notify
+ *
+ * DESCRIPTION: registering a callback for event notify.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @evt_cb : callback to be registered to handle event notify
+ * @user_data: user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int rc = -1;
+ rc = mm_camera_register_event_notify_internal(my_obj,
+ evt_cb,
+ user_data);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf)
+{
+ int rc = -1;
+ mm_channel_t * ch_obj = NULL;
+ ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* we always assume qbuf will be done before the channel/stream is fully stopped,
+     * because qbuf is done within the dataCB context;
+     * in order to avoid deadlock, we do not lock ch_lock for qbuf */
+ if (NULL != ch_obj) {
+ rc = mm_channel_qbuf(ch_obj, buf);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id)
+{
+ int rc = -1;
+ mm_channel_t * ch_obj = NULL;
+ uint32_t payload;
+ ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+ payload = stream_id;
+
+ if (NULL != ch_obj) {
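+        /* hand off from cam_lock to ch_lock: take the channel lock before
+         * releasing the camera lock so the channel stays protected across the
+         * FSM call; the same pattern is used by the channel ops below */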
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ * @my_obj: camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_capability cap;
+
+ /* get camera capabilities */
+ memset(&cap, 0, sizeof(cap));
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCAP, &cap);
+ if (rc != 0) {
+ LOGE("cannot get camera capabilities, rc = %d, errno %d",
+ rc, errno);
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be get from server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : if this call succeeds, we always assume an auto_focus
+ *              event will follow.
+ *==========================================================================*/
+int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_DO_AUTO_FOCUS, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_CANCEL_AUTO_FOCUS, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @do_af_flag : flag indicating if AF is needed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+ int32_t do_af_flag)
+{
+ int32_t rc = -1;
+ int32_t value = do_af_flag;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PREPARE_SNAPSHOT, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_START_ZSL_SNAPSHOT, &value);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl capture
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_STOP_ZSL_SNAPSHOT, &value);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_FLUSH, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : uint32_t type of channel handle
+ * 0 -- invalid channel handle, meaning the op failed
+ * >0 -- successfully added a channel with a valid handle
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ * no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ mm_channel_t *ch_obj = NULL;
+ uint8_t ch_idx = 0;
+ uint32_t ch_hdl = 0;
+
+ for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
+ if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
+ ch_obj = &my_obj->ch[ch_idx];
+ break;
+ }
+ }
+
+ if (NULL != ch_obj) {
+ /* initialize channel obj */
+ memset(ch_obj, 0, sizeof(mm_channel_t));
+ ch_hdl = mm_camera_util_generate_handler(ch_idx);
+ ch_obj->my_hdl = ch_hdl;
+ ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+ ch_obj->cam_obj = my_obj;
+ pthread_mutex_init(&ch_obj->ch_lock, NULL);
+ ch_obj->sessionid = my_obj->sessionid;
+ mm_channel_init(ch_obj, attr, channel_cb, userdata);
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ return ch_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : all streams in the channel should be stopped already before
+ * this channel can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DELETE,
+ NULL,
+ NULL);
+
+ pthread_mutex_destroy(&ch_obj->ch_lock);
+ memset(ch_obj, 0, sizeof(mm_channel_t));
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+ (void *)bundle_info,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_link_stream
+ *
+ * DESCRIPTION: link a stream into a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream that will be linked
+ * @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id)
+{
+ uint32_t s_hdl = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, linked_ch_id);
+ mm_channel_t * owner_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if ((NULL != ch_obj) && (NULL != owner_obj)) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ mm_camera_stream_link_t stream_link;
+ memset(&stream_link, 0, sizeof(mm_camera_stream_link_t));
+ stream_link.ch = owner_obj;
+ stream_link.stream_id = stream_id;
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_LINK_STREAM,
+ (void*)&stream_link,
+ (void*)&s_hdl);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ uint32_t s_hdl = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_ADD_STREAM,
+ NULL,
+ (void *)&s_hdl);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DEL_STREAM,
+ (void *)&stream_id,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_zsl_snapshot_ch
+ *
+ * DESCRIPTION: starts zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_zsl_snapshot_ch
+ *
+ * DESCRIPTION: stops zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+ mm_evt_paylod_config_stream_t payload;
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+ payload.stream_id = stream_id;
+ payload.config = config;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CONFIG_STREAM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_START,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_STOP,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *   @buf : request info, including the number of matched frames needed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if ((NULL != ch_obj) && (buf != NULL)) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+ (void *)buf, NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj, uint32_t ch_id,
+ uint32_t frame_idx)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+ (void *)&frame_idx,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_config_channel_notify
+ *
+ * DESCRIPTION: configures the channel notification mode
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+ (void *)&notify_mode,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_set_get_stream_parms_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = s_id;
+ payload.parms = parms;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_SET_STREAM_PARM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be get from server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_set_get_stream_parms_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = s_id;
+ payload.parms = parms;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_STREAM_PARM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_do_stream_action
+ *
+ * DESCRIPTION: request server to perform stream-based action. May be removed later
+ * if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @actions : ptr to an action struct buf to be performed by server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Actions to be performed by server are already
+ * filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ void *actions)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_do_stream_action_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.actions = actions;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DO_STREAM_ACTION,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ cam_buf_map_type payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.type = buf_type;
+ payload.frame_idx = buf_idx;
+ payload.plane_idx = plane_idx;
+ payload.fd = fd;
+ payload.size = size;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_MAP_STREAM_BUF,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_bufs(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ cam_buf_map_type_list payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memcpy(&payload, buf_map_list, sizeof(payload));
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_MAP_STREAM_BUFS,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx)
+{
+ int32_t rc = -1;
+ cam_buf_unmap_type payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.type = buf_type;
+ payload.frame_idx = buf_idx;
+ payload.plane_idx = plane_idx;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_evt_sub
+ *
+ * DESCRIPTION: subscribe/unsubscribe event notify from kernel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @reg_flag : 1 -- subscribe ; 0 -- unsubscribe
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+ uint8_t reg_flag)
+{
+ int32_t rc = 0;
+ struct v4l2_event_subscription sub;
+
+ memset(&sub, 0, sizeof(sub));
+ sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
+ sub.id = MSM_CAMERA_MSM_NOTIFY;
+ if(FALSE == reg_flag) {
+ /* unsubscribe */
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+ if (rc < 0) {
+ LOGE("unsubscribe event rc = %d, errno %d",
+ rc, errno);
+ return rc;
+ }
+        /* remove evt fd from the polling thread when unregistering the last event */
+ rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
+ my_obj->my_hdl,
+ mm_camera_sync_call);
+ } else {
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+ if (rc < 0) {
+ LOGE("subscribe event rc = %d, errno %d",
+ rc, errno);
+ return rc;
+ }
+        /* add evt fd to polling thread when subscribing to the first event */
+ rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+ my_obj->my_hdl,
+ my_obj->ctrl_fd,
+ mm_camera_event_notify,
+ (void*)my_obj,
+ mm_camera_sync_call);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_wait_for_event
+ *
+ * DESCRIPTION: utility function to wait for certain events
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *   @evt_mask : mask of events to wait for. Any event in the mask will
+ *               end the wait
+ * @status : status of the event
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_camera_util_wait_for_event(mm_camera_obj_t *my_obj,
+ uint32_t evt_mask,
+ uint32_t *status)
+{
+ int32_t rc = 0;
+ struct timespec ts;
+
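+    /* block (for up to WAIT_TIMEOUT seconds per wait) until the server signals
+     * one of the requested events; on timeout, report MSM_CAMERA_STATUS_FAIL
+     * back to the caller */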
+ pthread_mutex_lock(&my_obj->evt_lock);
+ while (!(my_obj->evt_rcvd.server_event_type & evt_mask)) {
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += WAIT_TIMEOUT;
+ rc = pthread_cond_timedwait(&my_obj->evt_cond, &my_obj->evt_lock, &ts);
+ if (rc) {
+ LOGE("pthread_cond_timedwait of evt_mask 0x%x failed %d",
+ evt_mask, rc);
+ break;
+ }
+ }
+ if (!rc) {
+ *status = my_obj->evt_rcvd.status;
+ } else {
+ *status = MSM_CAMERA_STATUS_FAIL;
+ }
+    /* reset local storage for the received event, ready for the next event */
+ memset(&my_obj->evt_rcvd, 0, sizeof(mm_camera_event_t));
+ pthread_mutex_unlock(&my_obj->evt_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_bundled_sendmsg
+ *
+ * DESCRIPTION: utility function to send bundled msg via domain socket
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @msg : message to be sent
+ * @buf_size : size of the message to be sent
+ * @sendfds : array of file descriptors to be sent
+ * @numfds : number of file descriptors to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int numfds)
+{
+ int32_t rc = -1;
+ uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg plus waiting for the response is treated as one operation*/
+ pthread_mutex_lock(&my_obj->msg_lock);
+ if(mm_camera_socket_bundle_sendmsg(my_obj->ds_fd, msg, buf_size, sendfds, numfds) > 0) {
+ /* wait for event that mapping/unmapping is done */
+ mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+ if (MSM_CAMERA_STATUS_SUCCESS == status) {
+ rc = 0;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->msg_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_sendmsg
+ *
+ * DESCRIPTION: utility function to send msg via domain socket
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @msg : message to be sent
+ * @buf_size : size of the message to be sent
+ *   @sendfd   : >0 if a file descriptor needs to be passed across processes
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfd)
+{
+ int32_t rc = -1;
+ uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg plus waiting for the response is treated as one operation*/
+ pthread_mutex_lock(&my_obj->msg_lock);
+ if(mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd) > 0) {
+ /* wait for event that mapping/unmapping is done */
+ mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+ if (MSM_CAMERA_STATUS_SUCCESS == status) {
+ rc = 0;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->msg_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type,
+ int fd,
+ size_t size)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+ packet.payload.buf_map.type = buf_type;
+ packet.payload.buf_map.fd = fd;
+ packet.payload.buf_map.size = size;
+ rc = mm_camera_util_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ fd);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_bufs
+ *
+ * DESCRIPTION: mapping camera buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
+ const cam_buf_map_type_list* buf_map_list)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+ memcpy(&packet.payload.buf_map_list, buf_map_list,
+ sizeof(packet.payload.buf_map_list));
+
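+    /* collect each buffer's fd so it can be passed across the domain socket
+     * together with the packet; unused slots are marked with -1 below */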
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+ uint32_t numbufs = packet.payload.buf_map_list.length;
+ uint32_t i;
+ for (i = 0; i < numbufs; i++) {
+ sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+ }
+
+ for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+ packet.payload.buf_map_list.buf_maps[i].fd = -1;
+ sendfds[i] = -1;
+ }
+
+ rc = mm_camera_util_bundled_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ sendfds,
+ numbufs);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+ packet.payload.buf_unmap.type = buf_type;
+ rc = mm_camera_util_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ -1);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_s_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for s_ctrl
+ *
+ * PARAMETERS :
+ *   @fd : file descriptor for sending ioctl
+ * @id : control id
+ * @value : value of the ioctl to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_s_ctrl(int32_t fd, uint32_t id, int32_t *value)
+{
+ int rc = 0;
+ struct v4l2_control control;
+
+ memset(&control, 0, sizeof(control));
+ control.id = id;
+ if (value != NULL) {
+ control.value = *value;
+ }
+ rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+
+ LOGD("fd=%d, S_CTRL, id=0x%x, value = %p, rc = %d\n",
+ fd, id, value, rc);
+ if (rc < 0) {
+ LOGE("ioctl failed %d, errno %d", rc, errno);
+ } else if (value != NULL) {
+ *value = control.value;
+ }
+ return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_g_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for g_ctrl
+ *
+ * PARAMETERS :
+ *   @fd : file descriptor for sending ioctl
+ * @id : control id
+ * @value : value of the ioctl to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+ int rc = 0;
+ struct v4l2_control control;
+
+ memset(&control, 0, sizeof(control));
+ control.id = id;
+ if (value != NULL) {
+ control.value = *value;
+ }
+ rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+ LOGD("fd=%d, G_CTRL, id=0x%x, rc = %d\n", fd, id, rc);
+ if (value != NULL) {
+ *value = control.value;
+ }
+ return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_channel_advanced_capture
+ *
+ * DESCRIPTION: sets the channel advanced capture
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @type : advanced capture type.
+ * @trigger : flag to indicate start/stop
+ * @in_value : input configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ uint32_t trigger, void *in_value)
+{
+ LOGD("E type = %d", type);
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ switch (type) {
+ case MM_CAMERA_AF_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_AF_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_AE_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_AE_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_FLASH_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_FLASH_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_ZOOM_1X:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_ZOOM_1X,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_FRAME_CAPTURE:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CAMERA_EVT_CAPTURE_SETTING,
+ (void *)in_value,
+ NULL);
+ break;
+ default:
+ break;
+ }
+
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_session_id
+ *
+ * DESCRIPTION: get the session identity
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @sessionid: pointer to the output session id
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get a valid session id
+ *==========================================================================*/
+int32_t mm_camera_get_session_id(mm_camera_obj_t *my_obj,
+ uint32_t* sessionid)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if(sessionid != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+ MSM_CAMERA_PRIV_G_SESSION_ID, &value);
+ LOGD("fd=%d, get_session_id, id=0x%x, value = %d, rc = %d\n",
+ my_obj->ctrl_fd, MSM_CAMERA_PRIV_G_SESSION_ID,
+ value, rc);
+ *sessionid = value;
+ my_obj->sessionid = value;
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_sync_related_sensors
+ *
+ * DESCRIPTION: send sync cmd
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to the related cam info to be sent to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the sync struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_sync_related_sensors(mm_camera_obj_t *my_obj,
+ cam_sync_related_sensors_event_info_t* parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_SYNC_RELATED_SENSORS, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream for which the callback is registered
+ * @stream_cb : special callback that needs to be registered for the stream buffer
+ * @cb_type : Callback type SYNC/ASYNC
+ * @userdata : user data pointer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t stream_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata)
+{
+ int rc = 0;
+ mm_stream_data_cb_t buf_cb;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&buf_cb, 0, sizeof(mm_stream_data_cb_t));
+ buf_cb.cb = stream_cb;
+ buf_cb.cb_count = -1;
+ buf_cb.cb_type = cb_type;
+ buf_cb.user_data = userdata;
+
+ mm_evt_paylod_reg_stream_buf_cb payload;
+ memset(&payload, 0, sizeof(mm_evt_paylod_reg_stream_buf_cb));
+ payload.buf_cb = buf_cb;
+ payload.stream_id = stream_id;
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_REG_STREAM_BUF_CB,
+ (void*)&payload, NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+#ifdef QCAMERA_REDEFINE_LOG
+
+/*===========================================================================
+ * DESCRIPTION: mm camera debug interface
+ *
+ *==========================================================================*/
+pthread_mutex_t dbg_log_mutex;
+
+#undef LOG_TAG
+#define LOG_TAG "QCamera"
+#define CDBG_MAX_STR_LEN 1024
+#define CDBG_MAX_LINE_LENGTH 256
+
+/* current trace logging permissions
+ * {NONE, ERR, WARN, HIGH, DEBUG, LOW, INFO} */
+int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1] = {
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_NO_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_HAL_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_MCI_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_JPEG_MODULE */
+};
+
+/* string representation for logging level */
+static const char *cam_dbg_level_to_str[] = {
+ "", /* CAM_GLBL_DBG_NONE */
+ "<ERROR>", /* CAM_GLBL_DBG_ERR */
+ "<WARN>", /* CAM_GLBL_DBG_WARN */
+ "<HIGH>", /* CAM_GLBL_DBG_HIGH */
+ "<DBG>", /* CAM_GLBL_DBG_DEBUG */
+ "<LOW>", /* CAM_GLBL_DBG_LOW */
+ "<INFO>" /* CAM_GLBL_DBG_INFO */
+};
+
+/* current trace logging configuration */
+typedef struct {
+ cam_global_debug_level_t level;
+ int initialized;
+ const char *name;
+ const char *prop;
+} module_debug_t;
+
+static module_debug_t cam_loginfo[(int)CAM_LAST_MODULE] = {
+ {CAM_GLBL_DBG_ERR, 1,
+ "", "persist.camera.global.debug" }, /* CAM_NO_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<HAL>", "persist.camera.hal.debug" }, /* CAM_HAL_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<MCI>", "persist.camera.mci.debug" }, /* CAM_MCI_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<JPEG>", "persist.camera.mmstill.logs" }, /* CAM_JPEG_MODULE */
+};
+
+/** cam_get_dbg_level
+ *
+ * @module: module name
+ *  @pValue: module debug logging level string read from the property
+ *
+ * Maps debug log string to value.
+ *
+ * Return: logging level
+ **/
+__unused
+static cam_global_debug_level_t cam_get_dbg_level(const char *module,
+ char *pValue) {
+
+ cam_global_debug_level_t rc = CAM_GLBL_DBG_NONE;
+
+ if (!strcmp(pValue, "none")) {
+ rc = CAM_GLBL_DBG_NONE;
+ } else if (!strcmp(pValue, "warn")) {
+ rc = CAM_GLBL_DBG_WARN;
+ } else if (!strcmp(pValue, "debug")) {
+ rc = CAM_GLBL_DBG_DEBUG;
+ } else if (!strcmp(pValue, "error")) {
+ rc = CAM_GLBL_DBG_ERR;
+ } else if (!strcmp(pValue, "low")) {
+ rc = CAM_GLBL_DBG_LOW;
+ } else if (!strcmp(pValue, "high")) {
+ rc = CAM_GLBL_DBG_HIGH;
+ } else if (!strcmp(pValue, "info")) {
+ rc = CAM_GLBL_DBG_INFO;
+ } else {
+ ALOGE("Invalid %s debug log level %s\n", module, pValue);
+ }
+
+ ALOGD("%s debug log level: %s\n", module, cam_dbg_level_to_str[rc]);
+
+ return rc;
+}
+
+/** cam_vsnprintf
+ * @pdst: destination buffer pointer
+ *   @size: size of destination buffer
+ * @pfmt: string format
+ *   @argptr: variable length argument list
+ *
+ * Processes variable length argument list to a formatted string.
+ *
+ * Return: n/a
+ **/
+static void cam_vsnprintf(char* pdst, unsigned int size,
+ const char* pfmt, va_list argptr) {
+ int num_chars_written = 0;
+
+ pdst[0] = '\0';
+ num_chars_written = vsnprintf(pdst, size, pfmt, argptr);
+
+ if ((num_chars_written >= (int)size) && (size > 0)) {
+ /* Message length exceeds the buffer limit size */
+ num_chars_written = size - 1;
+ pdst[size - 1] = '\0';
+ }
+}
+
+/** mm_camera_debug_log
+ *  @module: origin of the log message
+ * @level: logging level
+ * @func: caller function name
+ * @line: caller line number
+ * @fmt: log message formatting string
+ * @...: variable argument list
+ *
+ *  Generic logger method.
+ *
+ * Return: N/A
+ **/
+void mm_camera_debug_log(const cam_modules_t module,
+ const cam_global_debug_level_t level,
+ const char *func, const int line, const char *fmt, ...) {
+ char str_buffer[CDBG_MAX_STR_LEN];
+ va_list args;
+
+ va_start(args, fmt);
+ cam_vsnprintf(str_buffer, CDBG_MAX_STR_LEN, fmt, args);
+ va_end(args);
+
+ switch (level) {
+ case CAM_GLBL_DBG_WARN:
+ ALOGW("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_ERR:
+ ALOGE("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_INFO:
+ ALOGI("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_HIGH:
+ case CAM_GLBL_DBG_DEBUG:
+ case CAM_GLBL_DBG_LOW:
+ default:
+ ALOGD("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ }
+}
+
+ /** mm_camera_set_dbg_log_properties
+ *
+ * Set global and module log level properties.
+ *
+ * Return: N/A
+ **/
+void mm_camera_set_dbg_log_properties(void) {
+ int i;
+ unsigned int j;
+ static int boot_init = 1;
+ char property_value[PROPERTY_VALUE_MAX] = {0};
+ char default_value[PROPERTY_VALUE_MAX] = {0};
+
+ if (boot_init) {
+ boot_init = 0;
+ pthread_mutex_init(&dbg_log_mutex, 0);
+ }
+
+ /* set global and individual module logging levels */
+ pthread_mutex_lock(&dbg_log_mutex);
+ for (i = CAM_NO_MODULE; i < CAM_LAST_MODULE; i++) {
+ cam_global_debug_level_t log_level;
+ snprintf(default_value, PROPERTY_VALUE_MAX, "%d", (int)cam_loginfo[i].level);
+ property_get(cam_loginfo[i].prop, property_value, default_value);
+ log_level = (cam_global_debug_level_t)atoi(property_value);
+
+ /* fix KW warnings */
+ if (log_level > CAM_GLBL_DBG_INFO) {
+ log_level = CAM_GLBL_DBG_INFO;
+ }
+
+ cam_loginfo[i].level = log_level;
+
+        /* The logging macros will produce a log message when the logging level
+         * for a module is less than or equal to the level specified in the
+         * property for the module, or less than or equal to the level specified
+         * by the global logging property. Currently we don't allow INFO logging
+         * to be turned off */
+ for (j = CAM_GLBL_DBG_ERR; j <= CAM_GLBL_DBG_LOW; j++) {
+ g_cam_log[i][j] = (cam_loginfo[CAM_NO_MODULE].level != CAM_GLBL_DBG_NONE) &&
+ (cam_loginfo[i].level != CAM_GLBL_DBG_NONE) &&
+ ((j <= cam_loginfo[i].level) ||
+ (j <= cam_loginfo[CAM_NO_MODULE].level));
+ }
+ }
+ pthread_mutex_unlock(&dbg_log_mutex);
+}
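+
+/* Illustrative note (an assumption based on the property names and level table
+ * above): the per-module logging level can be raised at runtime from an adb
+ * shell, e.g.
+ *
+ *   adb shell setprop persist.camera.hal.debug 5       # 5 == CAM_GLBL_DBG_LOW
+ *   adb shell setprop persist.camera.global.debug 5
+ *
+ * The next call to mm_camera_set_dbg_log_properties() re-reads these
+ * properties and rebuilds the g_cam_log[][] permission table under
+ * dbg_log_mutex.
+ */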
+
+#endif
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..7807534
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,3639 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <fcntl.h>
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(mm_camera_obj_t * cam_obj,
+ uint32_t handler);
+/* Static frame sync info used between different camera channels*/
+static mm_channel_frame_sync_info_t fs = { .num_cam =0, .pos = 0};
+/* Frame sync info access lock */
+static pthread_mutex_t fs_lock = PTHREAD_MUTEX_INITIALIZER;
+
+/* internal function declare goes here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id);
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+ mm_camera_stream_link_t *stream_link);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+ cam_bundle_config_t *bundle_info);
+int32_t mm_channel_start(mm_channel_t *my_obj);
+int32_t mm_channel_stop(mm_channel_t *my_obj);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+ mm_camera_req_buf_t *buf);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj,
+ uint32_t frame_idx,
+ cam_stream_type_t stream_type);
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+ mm_camera_super_buf_notify_mode_t notify_mode);
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue, cam_stream_type_t cam_type);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj,
+ uint32_t stream_id);
+
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+ mm_evt_paylod_do_stream_action_t *payload);
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+ cam_buf_map_type *payload);
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+ cam_buf_map_type_list *payload);
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+ cam_buf_unmap_type *payload);
+
+/* state machine function declare */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+ mm_channel_queue_t * queue, mm_channel_t *ch_obj);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t *my_obj,
+ mm_channel_queue_t *queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t *my_obj,
+ mm_channel_queue_t *queue);
+
+static int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+ mm_camera_generic_cmd_t *p_gen_cmd);
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue);
+
+/* Start of Frame Sync util methods */
+void mm_frame_sync_reset();
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj);
+int32_t mm_frame_sync_remove(uint32_t frame_id);
+uint32_t mm_frame_sync_find_matched(uint8_t oldest);
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id);
+void mm_frame_sync_lock_queues();
+void mm_frame_sync_unlock_queues();
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node);
+/* End of Frame Sync Util methods */
+void mm_channel_send_super_buf(mm_channel_node_info_t *info);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+ mm_channel_queue_t * queue, uint32_t frame_idx);
+
+/*===========================================================================
+ * FUNCTION : mm_channel_util_get_stream_by_handler
+ *
+ * DESCRIPTION: utility function to get a stream object from its handle
+ *
+ * PARAMETERS :
+ *   @ch_obj: ptr to a channel object
+ * @handler: stream handle
+ *
+ * RETURN : ptr to a stream object.
+ * NULL if failed.
+ *==========================================================================*/
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+ mm_channel_t * ch_obj,
+ uint32_t handler)
+{
+ int i;
+ mm_stream_t *s_obj = NULL;
+ for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
+ (handler == ch_obj->streams[i].my_hdl)) {
+ s_obj = &ch_obj->streams[i];
+ break;
+ }
+ }
+ return s_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_dispatch_super_buf
+ *
+ * DESCRIPTION: dispatch super buffer of bundle to registered user
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr storing matched super buf information
+ * @userdata: user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_channel_dispatch_super_buf(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ mm_channel_t * my_obj = (mm_channel_t *)user_data;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ if (MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB != cmd_cb->cmd_type) {
+ LOGE("Wrong cmd_type (%d) for super buf dataCB",
+ cmd_cb->cmd_type);
+ return;
+ }
+
+ if (my_obj->bundle.super_buf_notify_cb) {
+ my_obj->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, my_obj->bundle.user_data);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_process_stream_buf
+ *
+ * DESCRIPTION: handle an incoming buffer from a stream in a bundle. In this
+ *              function, matching logic is performed on incoming stream frames.
+ *              Depending on the bundle attribute, matched frames are either
+ *              stored in the superbuf queue or sent to the upper layer through
+ *              the registered callback.
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr to the incoming command/buffer info to be processed
+ * @user_data: user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+ void *user_data)
+{
+ mm_camera_super_buf_notify_mode_t notify_mode;
+ mm_channel_queue_node_t *node = NULL;
+ mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+ uint32_t i = 0;
+ /* Set expected frame id to a future frame idx, large enough to wait
+ * for good_frame_idx_range, and small enough to still capture an image */
+ uint8_t needStartZSL = FALSE;
+
+ if (NULL == ch_obj) {
+ return;
+ }
+ if (MM_CAMERA_CMD_TYPE_DATA_CB == cmd_cb->cmd_type) {
+ /* comp_and_enqueue */
+ mm_channel_superbuf_comp_and_enqueue(
+ ch_obj,
+ &ch_obj->bundle.superbuf_queue,
+ &cmd_cb->u.buf);
+ } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB == cmd_cb->cmd_type) {
+ /* skip frames if needed */
+ ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
+ ch_obj->pending_retro_cnt = cmd_cb->u.req_buf.num_retro_buf_requested;
+ ch_obj->req_type = cmd_cb->u.req_buf.type;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+
+ LOGH("pending cnt (%d), retro count (%d)"
+ "req_type (%d) is_primary (%d)",
+ ch_obj->pending_cnt, ch_obj->pending_retro_cnt,
+ ch_obj->req_type, cmd_cb->u.req_buf.primary_only);
+ if (!ch_obj->pending_cnt || (ch_obj->pending_retro_cnt > ch_obj->pending_cnt)) {
+ ch_obj->pending_retro_cnt = ch_obj->pending_cnt;
+ }
+ if (ch_obj->pending_retro_cnt > 0) {
+ LOGL("Resetting need Led Flash!!!");
+ ch_obj->needLEDFlash = 0;
+ }
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->unLockAEC = 0;
+
+ mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ } else if (MM_CAMERA_CMD_TYPE_START_ZSL == cmd_cb->cmd_type) {
+ ch_obj->manualZSLSnapshot = TRUE;
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ } else if (MM_CAMERA_CMD_TYPE_STOP_ZSL == cmd_cb->cmd_type) {
+ ch_obj->manualZSLSnapshot = FALSE;
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ } else if (MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY == cmd_cb->cmd_type) {
+ ch_obj->bundle.superbuf_queue.attr.notify_mode = cmd_cb->u.notify_mode;
+ } else if (MM_CAMERA_CMD_TYPE_FLUSH_QUEUE == cmd_cb->cmd_type) {
+ ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.flush_cmd.frame_idx;
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, cmd_cb->u.flush_cmd.stream_type);
+ cam_sem_post(&(ch_obj->cmd_thread.sync_sem));
+ return;
+ } else if (MM_CAMERA_CMD_TYPE_GENERAL == cmd_cb->cmd_type) {
+ LOGH("MM_CAMERA_CMD_TYPE_GENERAL");
+ switch (cmd_cb->u.gen_cmd.type) {
+ case MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING:
+ case MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMDTYPE_AF_BRACKETING %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need AE bracketing, start zsl snapshot");
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX;
+ } else {
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMDTYPE_FLASH_BRACKETING %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need flash bracketing");
+ ch_obj->isFlashBracketingEnabled = TRUE;
+ } else {
+ ch_obj->isFlashBracketingEnabled = FALSE;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need zoom 1x frame");
+ ch_obj->isZoom1xFrameRequested = TRUE;
+ } else {
+ ch_obj->isZoom1xFrameRequested = FALSE;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING %u num_batch = %d",
+ start, cmd_cb->u.gen_cmd.frame_config.num_batch);
+
+ if (start) {
+ memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+ for (i = 0; i < cmd_cb->u.gen_cmd.frame_config.num_batch; i++) {
+ if (cmd_cb->u.gen_cmd.frame_config.configs[i].type
+ != CAM_CAPTURE_RESET) {
+ ch_obj->frameConfig.configs[
+ ch_obj->frameConfig.num_batch] =
+ cmd_cb->u.gen_cmd.frame_config.configs[i];
+ ch_obj->frameConfig.num_batch++;
+                    LOGH("capture setting frame = %d type = %d",
+                            i, ch_obj->frameConfig.configs[
+                            ch_obj->frameConfig.num_batch - 1].type);
+ }
+ }
+ LOGD("Capture setting Batch Count %d",
+ ch_obj->frameConfig.num_batch);
+ ch_obj->isConfigCapture = TRUE;
+ } else {
+ ch_obj->isConfigCapture = FALSE;
+ memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+ }
+ ch_obj->cur_capture_idx = 0;
+ memset(ch_obj->capture_frame_id, 0, sizeof(uint8_t) * MAX_CAPTURE_BATCH_NUM);
+ break;
+ }
+ default:
+ LOGE("Error: Invalid command");
+ break;
+ }
+ }
+ notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+ /*Handle use case which does not need start ZSL even in unified case*/
+ if ((ch_obj->pending_cnt > 0)
+ && (ch_obj->isConfigCapture)
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && (ch_obj->startZSlSnapshotCalled == FALSE)) {
+ needStartZSL = TRUE;
+ for (i = ch_obj->cur_capture_idx;
+ i < ch_obj->frameConfig.num_batch;
+ i++) {
+ cam_capture_type type = ch_obj->frameConfig.configs[i].type;
+ if (((type == CAM_CAPTURE_FLASH) && (!ch_obj->needLEDFlash))
+ || ((type == CAM_CAPTURE_LOW_LIGHT) && (!ch_obj->needLowLightZSL))) {
+ /*For flash and low light capture, start ZSL is triggered only if needed*/
+ needStartZSL = FALSE;
+ break;
+ }
+ }
+ }
+
+ if ((ch_obj->isConfigCapture)
+ && (needStartZSL)) {
+ for (i = ch_obj->cur_capture_idx;
+ i < ch_obj->frameConfig.num_batch;
+ i++) {
+ ch_obj->capture_frame_id[i] =
+ ch_obj->bundle.superbuf_queue.expected_frame_id
+ + MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+ }
+
+ /* Need to Flush the queue and trigger frame config */
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+ LOGI("TRIGGER Start ZSL");
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = TRUE;
+ ch_obj->burstSnapNum = ch_obj->pending_cnt;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ } else if ((ch_obj->pending_cnt > 0)
+ && ((ch_obj->needLEDFlash == TRUE) ||
+ (MM_CHANNEL_BRACKETING_STATE_OFF != ch_obj->bracketingState))
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && ch_obj->startZSlSnapshotCalled == FALSE) {
+
+ LOGI("TRIGGER Start ZSL for Flash");
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = TRUE;
+ ch_obj->burstSnapNum = ch_obj->pending_cnt;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ } else if (((ch_obj->pending_cnt == 0) || (ch_obj->stopZslSnapshot == 1))
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && (ch_obj->startZSlSnapshotCalled == TRUE)) {
+ LOGI("TRIGGER Stop ZSL for cancel picture");
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ // Unlock AEC
+ ch_obj->startZSlSnapshotCalled = FALSE;
+ ch_obj->needLEDFlash = FALSE;
+ ch_obj->burstSnapNum = 0;
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ ch_obj->unLockAEC = 1;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ ch_obj->isConfigCapture = FALSE;
+ }
+ /* bufdone for overflowed bufs */
+ mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ LOGD("Super Buffer received, pending_cnt=%d queue cnt = %d expected = %d",
+ ch_obj->pending_cnt, ch_obj->bundle.superbuf_queue.match_cnt,
+ ch_obj->bundle.superbuf_queue.expected_frame_id);
+
+ /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+ while (((ch_obj->pending_cnt > 0) ||
+ (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode)) &&
+ (!ch_obj->bWaitForPrepSnapshotDone)) {
+
+ /* dequeue */
+ mm_channel_node_info_t info;
+ memset(&info, 0x0, sizeof(info));
+
+ if (ch_obj->req_type == MM_CAMERA_REQ_FRAME_SYNC_BUF) {
+ // Lock the Queues
+ mm_frame_sync_lock_queues();
+ uint32_t match_frame = mm_frame_sync_find_matched(FALSE);
+ if (match_frame) {
+ uint8_t j = 0;
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue == NULL) {
+ LOGW("Channel queue is NULL");
+ break;
+ }
+ node = mm_channel_superbuf_dequeue_frame_internal(
+ ch_queue, match_frame);
+ if (node != NULL) {
+ info.ch_obj[info.num_nodes] = fs.ch_obj[j];
+ info.node[info.num_nodes] = node;
+ info.num_nodes++;
+ LOGH("Added ch(%p) to node ,num nodes %d",
+ fs.ch_obj[j], info.num_nodes);
+ }
+ }
+ }
+ mm_frame_sync_remove(match_frame);
+ LOGI("match frame %d", match_frame);
+ if (info.num_nodes != fs.num_cam) {
+ LOGI("num node %d != num cam (%d) Debug this",
+ info.num_nodes, fs.num_cam);
+ uint8_t j = 0;
+ // free super buffers from various nodes
+ for (j = 0; j < info.num_nodes; j++) {
+ if (info.node[j]) {
+ mm_channel_node_qbuf(info.ch_obj[j], info.node[j]);
+ free(info.node[j]);
+ }
+ }
+ // we should not use it as matched dual camera frames
+ info.num_nodes = 0;
+ }
+ }
+ mm_frame_sync_unlock_queues();
+ } else {
+ node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue, ch_obj);
+ if (node != NULL) {
+ if (ch_obj->isConfigCapture &&
+ ((node->frame_idx <
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx]))) {
+ uint8_t i;
+ LOGD("Not expected super buffer. frameID = %d expected = %d",
+ node->frame_idx, ch_obj->capture_frame_id[ch_obj->cur_capture_idx]);
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ free(node);
+ } else {
+ info.num_nodes = 1;
+ info.ch_obj[0] = ch_obj;
+ info.node[0] = node;
+ }
+ }
+ }
+ if (info.num_nodes > 0) {
+ /* decrease pending_cnt */
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+ ch_obj->pending_cnt--;
+ if (ch_obj->pending_retro_cnt > 0) {
+ if (ch_obj->pending_retro_cnt == 1) {
+ ch_obj->bWaitForPrepSnapshotDone = 1;
+ }
+ ch_obj->pending_retro_cnt--;
+ }
+
+ if (((ch_obj->pending_cnt == 0) ||
+ (ch_obj->stopZslSnapshot == 1)) &&
+ (ch_obj->manualZSLSnapshot == FALSE) &&
+ ch_obj->startZSlSnapshotCalled == TRUE) {
+ LOGI("TRIGGER Stop ZSL. All frame received");
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = FALSE;
+ ch_obj->burstSnapNum = 0;
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->unLockAEC = 1;
+ ch_obj->needLEDFlash = FALSE;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ ch_obj->isConfigCapture = FALSE;
+ }
+
+ if (ch_obj->isConfigCapture) {
+ if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames != 0) {
+ ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames--;
+ } else {
+ LOGW("Invalid frame config batch index %d max batch = %d",
+ ch_obj->cur_capture_idx, ch_obj->frameConfig.num_batch);
+ }
+
+ if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames == 0) {
+ //Received all frames for current batch
+ ch_obj->cur_capture_idx++;
+ ch_obj->bundle.superbuf_queue.expected_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ ch_obj->bundle.superbuf_queue.good_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ } else {
+ LOGH("Need %d frames more for batch %d",
+ ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames,
+ ch_obj->cur_capture_idx);
+ }
+ }
+ }
+ /* dispatch superbuf */
+ mm_channel_send_super_buf(&info);
+ } else {
+ /* no superbuf avail, break the loop */
+ break;
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_send_super_buf
+ *
+ * DESCRIPTION: Send super buffers to HAL
+ *
+ * PARAMETERS :
+ * @info : Info of super buffers to be sent in callback
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_channel_send_super_buf(mm_channel_node_info_t *info)
+{
+ if (!info || !info->num_nodes){
+ LOGE("X Error!! Info invalid");
+ return;
+ }
+ mm_channel_queue_node_t *node = NULL;
+
+ LOGH("num nodes %d to send", info->num_nodes);
+ uint32_t idx = 0;
+ mm_channel_t *ch_obj = NULL;
+ for (idx = 0; idx < info->num_nodes; idx++) {
+ node = info->node[idx];
+ ch_obj = info->ch_obj[idx];
+ if ((ch_obj) && (NULL != ch_obj->bundle.super_buf_notify_cb) && node) {
+ mm_camera_cmdcb_t* cb_node = NULL;
+ LOGD("Send superbuf to HAL, pending_cnt=%d",
+ ch_obj->pending_cnt);
+ /* send cam_sem_post to wake up cb thread to dispatch super buffer */
+ cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != cb_node) {
+ memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+ cb_node->cmd_type = MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB;
+ cb_node->u.superbuf.num_bufs = node->num_of_bufs;
+ uint8_t i = 0;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ cb_node->u.superbuf.bufs[i] = node->super_buf[i].buf;
+ }
+ cb_node->u.superbuf.camera_handle = ch_obj->cam_obj->my_hdl;
+ cb_node->u.superbuf.ch_id = ch_obj->my_hdl;
+ cb_node->u.superbuf.bReadyForPrepareSnapshot =
+ ch_obj->bWaitForPrepSnapshotDone;
+ if (ch_obj->unLockAEC == 1) {
+ cb_node->u.superbuf.bUnlockAEC = 1;
+ LOGH("Unlocking AEC");
+ ch_obj->unLockAEC = 0;
+ }
+ /* enqueue to cb thread */
+ cam_queue_enq(&(ch_obj->cb_thread.cmd_queue), cb_node);
+ /* wake up cb thread */
+ cam_sem_post(&(ch_obj->cb_thread.cmd_sem));
+ LOGH("Sent super buf for node[%d] ", idx);
+
+ } else {
+                LOGE("No memory for mm_camera_cmdcb_t");
+ /* buf done with the unused super buf */
+ uint8_t i = 0;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ }
+ free(node);
+ } else if ((ch_obj != NULL) && (node != NULL)) {
+ /* buf done with the unused super buf */
+ uint8_t i;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ free(node);
+ } else {
+ LOGE("node is NULL, debug this");
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ * @my_obj : Channel object
+ * @stream_id : stream for which the callback is registered
+ * @buf_cb : special callback that needs to be registered for the stream buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_reg_stream_buf_cb (mm_channel_t* my_obj,
+ uint32_t stream_id, mm_stream_data_cb_t buf_cb)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+ rc = mm_stream_reg_buf_cb(s_obj, buf_cb);
+ }
+
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn
+ *
+ * DESCRIPTION: channel finite state machine entry function. Depends on channel
+ * state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ LOGD("E state = %d", my_obj->state);
+ switch (my_obj->state) {
+ case MM_CHANNEL_STATE_NOTUSED:
+ rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_STOPPED:
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_ACTIVE:
+ rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_PAUSED:
+ rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ LOGD("Not a valid state (%d)", my_obj->state);
+ break;
+ }
+
+ /* unlock ch_lock */
+ pthread_mutex_unlock(&my_obj->ch_lock);
+ LOGD("X rc = %d", rc);
+ return rc;
+}
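+
+/* Illustrative calling convention (a sketch derived from the callers in
+ * mm_camera.c, e.g. mm_camera_channel_advanced_capture): the caller acquires
+ * ch_lock before dispatching an event, and mm_channel_fsm_fn releases it on
+ * exit, so no explicit unlock is needed afterwards.
+ *
+ *   pthread_mutex_lock(&ch_obj->ch_lock);
+ *   rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START, NULL, NULL);
+ *   // ch_lock has already been released inside mm_channel_fsm_fn
+ */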
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_notused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in NOT_USED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ switch (evt) {
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_stopped
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in STOPPED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E evt = %d", evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_ADD_STREAM:
+ {
+ uint32_t s_hdl = 0;
+ s_hdl = mm_channel_add_stream(my_obj);
+ *((uint32_t*)out_val) = s_hdl;
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_LINK_STREAM:
+ {
+ mm_camera_stream_link_t *stream_link = NULL;
+ uint32_t s_hdl = 0;
+ stream_link = (mm_camera_stream_link_t *) in_val;
+ s_hdl = mm_channel_link_stream(my_obj, stream_link);
+ *((uint32_t*)out_val) = s_hdl;
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_DEL_STREAM:
+ {
+ uint32_t s_id = *((uint32_t *)in_val);
+ rc = mm_channel_del_stream(my_obj, s_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_START:
+ {
+ rc = mm_channel_start(my_obj);
+ /* first stream started in stopped state
+ * move to active state */
+ if (0 == rc) {
+ my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_CONFIG_STREAM:
+ {
+ mm_evt_paylod_config_stream_t *payload =
+ (mm_evt_paylod_config_stream_t *)in_val;
+ rc = mm_channel_config_stream(my_obj,
+ payload->stream_id,
+ payload->config);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+ {
+ cam_bundle_config_t *payload =
+ (cam_bundle_config_t *)in_val;
+ rc = mm_channel_get_bundle_info(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DELETE:
+ {
+ mm_channel_release(my_obj);
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+ {
+ uint32_t stream_id = *((uint32_t *)in_val);
+ rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+ {
+ mm_evt_paylod_do_stream_action_t *payload =
+ (mm_evt_paylod_do_stream_action_t *)in_val;
+ rc = mm_channel_do_stream_action(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+ {
+ cam_buf_map_type *payload =
+ (cam_buf_map_type *)in_val;
+ rc = mm_channel_map_stream_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+ {
+ cam_buf_map_type_list *payload =
+ (cam_buf_map_type_list *)in_val;
+ rc = mm_channel_map_stream_bufs(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+ {
+ cam_buf_unmap_type *payload =
+ (cam_buf_unmap_type *)in_val;
+ rc = mm_channel_unmap_stream_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+ {
+ mm_evt_paylod_reg_stream_buf_cb *payload =
+ (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+ rc = mm_channel_reg_stream_buf_cb (my_obj,
+ payload->stream_id, payload->buf_cb);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d)",
+ my_obj->state, evt);
+ break;
+ }
+ LOGD("E rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_active
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in ACTIVE state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ LOGD("E evt = %d", evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_STOP:
+ {
+ rc = mm_channel_stop(my_obj);
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ }
+ break;
+ case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+ {
+ mm_camera_req_buf_t *payload =
+ (mm_camera_req_buf_t *)in_val;
+ rc = mm_channel_request_super_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+ {
+ rc = mm_channel_cancel_super_buf_request(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE:
+ {
+ uint32_t frame_idx = *((uint32_t *)in_val);
+ rc = mm_channel_flush_super_buf_queue(my_obj, frame_idx, CAM_STREAM_TYPE_DEFAULT);
+ }
+ break;
+ case MM_CHANNEL_EVT_START_ZSL_SNAPSHOT:
+ {
+ rc = mm_channel_start_zsl_snapshot(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT:
+ {
+ rc = mm_channel_stop_zsl_snapshot(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE:
+ {
+ mm_camera_super_buf_notify_mode_t notify_mode =
+ *((mm_camera_super_buf_notify_mode_t *)in_val);
+ rc = mm_channel_config_notify_mode(my_obj, notify_mode);
+ }
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+ {
+ uint32_t stream_id = *((uint32_t *)in_val);
+ rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+ {
+ mm_evt_paylod_do_stream_action_t *payload =
+ (mm_evt_paylod_do_stream_action_t *)in_val;
+ rc = mm_channel_do_stream_action(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+ {
+ cam_buf_map_type *payload =
+ (cam_buf_map_type *)in_val;
+ if (payload != NULL) {
+ uint8_t type = payload->type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_map_stream_buf(my_obj, payload);
+ }
+ } else {
+ LOGE("cannot map regualr stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+ {
+ cam_buf_map_type_list *payload =
+ (cam_buf_map_type_list *)in_val;
+ if ((payload != NULL) && (payload->length > 0)) {
+ uint8_t type = payload->buf_maps[0].type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_map_stream_bufs(my_obj, payload);
+ }
+ } else {
+ LOGE("cannot map regualr stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+ {
+ cam_buf_unmap_type *payload =
+ (cam_buf_unmap_type *)in_val;
+ if (payload != NULL) {
+ uint8_t type = payload->type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_unmap_stream_buf(my_obj, payload);
+ }
+ } else {
+ LOGE("cannot unmap regualr stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_AF_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_AF_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_AE_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_AE_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_FLASH_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_FLASH_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_ZOOM_1X:
+ {
+ LOGH("MM_CHANNEL_EVT_ZOOM_1X");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CAMERA_EVT_CAPTURE_SETTING:
+ {
+ mm_camera_generic_cmd_t gen_cmd;
+ cam_capture_frame_config_t *input;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING;
+ LOGH("MM_CAMERA_EVT_CAPTURE_SETTING");
+ if (in_val == NULL) {
+ gen_cmd.payload[0] = 0;
+ memset(&gen_cmd.frame_config, 0, sizeof(cam_capture_frame_config_t));
+ } else {
+ gen_cmd.payload[0] = 1;
+ input = (cam_capture_frame_config_t *)in_val;
+ gen_cmd.frame_config = *input;
+ }
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+ {
+ mm_evt_paylod_reg_stream_buf_cb *payload =
+ (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+ rc = mm_channel_reg_stream_buf_cb (my_obj,
+ payload->stream_id, payload->buf_cb);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_paused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in PAUSED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ /* currently we are not supporting pause/resume channel */
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_init
+ *
+ * DESCRIPTION: initialize a channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object be to initialized
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ * no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+int32_t mm_channel_init(mm_channel_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ int32_t rc = 0;
+
+ my_obj->bundle.super_buf_notify_cb = channel_cb;
+ my_obj->bundle.user_data = userdata;
+ if (NULL != attr) {
+ my_obj->bundle.superbuf_queue.attr = *attr;
+ }
+
+ LOGD("Launch data poll thread in channel open");
+ snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
+ mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+ MM_CAMERA_POLL_TYPE_DATA);
+
+ /* change state to stopped state */
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_release
+ *
+ * DESCRIPTION: release a channel resource. Channel state will move to UNUSED
+ * state after this call.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_channel_release(mm_channel_t *my_obj)
+{
+ /* stop data poll thread */
+ mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+ /* memset bundle info */
+ memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+
+ /* change state to notused state */
+ my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_link_stream
+ *
+ * DESCRIPTION: link a stream from an external channel into this channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_link : channel and stream to be linked
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+ mm_camera_stream_link_t *stream_link)
+{
+ uint8_t idx = 0;
+ uint32_t s_hdl = 0;
+ mm_stream_t *stream_obj = NULL;
+ mm_stream_t *stream = NULL;
+
+ if (NULL == stream_link) {
+ LOGE("Invalid stream link");
+ return 0;
+ }
+
+ stream = mm_channel_util_get_stream_by_handler(stream_link->ch,
+ stream_link->stream_id);
+ if (NULL == stream) {
+ return 0;
+ }
+
+ /* check available stream */
+ for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+ if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+ stream_obj = &my_obj->streams[idx];
+ break;
+ }
+ }
+ if (NULL == stream_obj) {
+ LOGE("streams reach max, no more stream allowed to add");
+ return s_hdl;
+ }
+
+ /* initialize stream object */
+ *stream_obj = *stream;
+ stream_obj->linked_stream = stream;
+ s_hdl = stream->my_hdl;
+
+ LOGD("stream handle = %d", s_hdl);
+ return s_hdl;
+}
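+
+/* Illustrative sketch (assumes src_ch/dst_ch and stream_hdl are valid handles
+ * obtained elsewhere; not part of the original source): linking a stream that
+ * belongs to another channel into this channel.
+ *
+ *   mm_camera_stream_link_t link;
+ *   memset(&link, 0, sizeof(link));
+ *   link.ch = src_ch;              // channel object that owns the stream
+ *   link.stream_id = stream_hdl;   // handle of the stream to link
+ *   uint32_t linked_hdl = mm_channel_link_stream(dst_ch, &link);
+ *   if (linked_hdl == 0) {
+ *       LOGE("stream link failed");
+ *   }
+ */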
+
+/*===========================================================================
+ * FUNCTION : mm_channel_add_stream
+ *
+ * DESCRIPTION: add a stream into the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ uint8_t idx = 0;
+ uint32_t s_hdl = 0;
+ mm_stream_t *stream_obj = NULL;
+
+ LOGD("E");
+ /* check available stream */
+ for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+ if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+ stream_obj = &my_obj->streams[idx];
+ break;
+ }
+ }
+ if (NULL == stream_obj) {
+ LOGE("streams reach max, no more stream allowed to add");
+ return s_hdl;
+ }
+
+ /* initialize stream object */
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ stream_obj->fd = -1;
+ stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+ stream_obj->ch_obj = my_obj;
+ pthread_mutex_init(&stream_obj->buf_lock, NULL);
+ pthread_mutex_init(&stream_obj->cb_lock, NULL);
+ pthread_mutex_init(&stream_obj->cmd_lock, NULL);
+ pthread_cond_init(&stream_obj->buf_cond, NULL);
+ memset(stream_obj->buf_status, 0,
+ sizeof(stream_obj->buf_status));
+ stream_obj->state = MM_STREAM_STATE_INITED;
+
+ /* acquire stream */
+ rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+ if (0 == rc) {
+ s_hdl = stream_obj->my_hdl;
+ } else {
+ /* error during acquire, de-init */
+ pthread_cond_destroy(&stream_obj->buf_cond);
+ pthread_mutex_destroy(&stream_obj->buf_lock);
+ pthread_mutex_destroy(&stream_obj->cb_lock);
+ pthread_mutex_destroy(&stream_obj->cmd_lock);
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ }
+ LOGD("stream handle = %d", s_hdl);
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_del_stream
+ *
+ * DESCRIPTION: delete a stream from the channel by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : assume the stream is stopped before it can be deleted
+ *==========================================================================*/
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+ return rc;
+ }
+
+ if (stream_obj->ch_obj != my_obj) {
+ /* Only unlink stream */
+ pthread_mutex_lock(&stream_obj->linked_stream->buf_lock);
+ stream_obj->linked_stream->is_linked = 0;
+ stream_obj->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&stream_obj->linked_stream->buf_lock);
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_RELEASE,
+ NULL,
+ NULL);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ LOGD("E stream ID = %d", stream_id);
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+ return rc;
+ }
+
+ if (stream_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ /* set stream fmt */
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_SET_FMT,
+ (void *)config,
+ NULL);
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel, which should include all
+ * streams within this channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+ cam_bundle_config_t *bundle_info)
+{
+ int i;
+ mm_stream_t *s_obj = NULL;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+ int32_t rc = 0;
+
+ memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+ bundle_info->bundle_id = my_obj->my_hdl;
+ bundle_info->num_of_streams = 0;
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ if ((CAM_STREAM_TYPE_METADATA != stream_type) &&
+ (s_obj->ch_obj == my_obj)) {
+ bundle_info->stream_ids[bundle_info->num_of_streams++] =
+ s_obj->server_stream_id;
+ }
+ } else {
+ LOGE("cannot find stream obj (%d) by handler (%d)",
+ i, my_obj->streams[i].my_hdl);
+ rc = -1;
+ break;
+ }
+ }
+ }
+ if (rc != 0) {
+ /* error, reset to 0 */
+ memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+ }
+ return rc;
+}
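+
+/* Illustrative sketch (not part of this driver): querying the bundle info of
+ * a channel and walking the returned server stream ids. `ch` is assumed to be
+ * a valid channel object owned by the caller.
+ *
+ *     cam_bundle_config_t info;
+ *     if (mm_channel_get_bundle_info(ch, &info) == 0) {
+ *         uint8_t i;
+ *         for (i = 0; i < info.num_of_streams; i++) {
+ *             LOGD("bundle %d: stream[%d] = %d",
+ *                  info.bundle_id, i, info.stream_ids[i]);
+ *         }
+ *     }
+ */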
+
+/*===========================================================================
+ * FUNCTION : mm_channel_start
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ int i = 0, j = 0;
+ mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+ uint8_t num_streams_to_start = 0;
+ uint8_t num_streams_in_bundle_queue = 0;
+ mm_stream_t *s_obj = NULL;
+ int meta_stream_idx = 0;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ /* remember meta data stream index */
+ if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
+ (s_obj->ch_obj == my_obj)) {
+ meta_stream_idx = num_streams_to_start;
+ }
+ s_objs[num_streams_to_start++] = s_obj;
+
+ if (!s_obj->stream_info->noFrameExpected) {
+ num_streams_in_bundle_queue++;
+ }
+ }
+ }
+ }
+
+ if (meta_stream_idx > 0 ) {
+ /* always start meta data stream first, so switch the stream object with the first one */
+ s_obj = s_objs[0];
+ s_objs[0] = s_objs[meta_stream_idx];
+ s_objs[meta_stream_idx] = s_obj;
+ }
+
+ if (NULL != my_obj->bundle.super_buf_notify_cb) {
+ /* need to send up cb, therefore launch thread */
+ /* init superbuf queue */
+ mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+ my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
+ my_obj->bundle.superbuf_queue.expected_frame_id =
+ my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
+ my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
+ my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
+ my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
+ my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
+ my_obj->bundle.superbuf_queue.good_frame_id = 0;
+
+ for (i = 0; i < num_streams_to_start; i++) {
+ /* Only bundle streams that belong to the channel */
+ if(!(s_objs[i]->stream_info->noFrameExpected)) {
+ if (s_objs[i]->ch_obj == my_obj) {
+ /* set bundled flag to streams */
+ s_objs[i]->is_bundled = 1;
+ }
+ my_obj->bundle.superbuf_queue.bundled_streams[j++] = s_objs[i]->my_hdl;
+ }
+ }
+
+ /* launch cb thread for dispatching super buf through cb */
+ snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
+ mm_camera_cmd_thread_launch(&my_obj->cb_thread,
+ mm_channel_dispatch_super_buf,
+ (void*)my_obj);
+
+ /* launch cmd thread for super buf dataCB */
+ snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_channel_process_stream_buf,
+ (void*)my_obj);
+
+ /* set flag to TRUE */
+ my_obj->bundle.is_active = TRUE;
+ }
+
+ /* link any streams first before starting the rest of the streams */
+ for (i = 0; i < num_streams_to_start; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+ s_objs[i]->linked_stream->linked_obj = my_obj;
+ s_objs[i]->linked_stream->is_linked = 1;
+ pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+ continue;
+ }
+ }
+
+ for (i = 0; i < num_streams_to_start; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+ /* all streams within a channel should be started at the same time */
+ if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
+ LOGE("stream already started idx(%d)", i);
+ rc = -1;
+ break;
+ }
+
+ /* allocate buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_GET_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("get buf failed at idx(%d)", i);
+ break;
+ }
+
+ /* reg buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_REG_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("reg buf failed at idx(%d)", i);
+ break;
+ }
+
+ /* start stream */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_START,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("start stream failed at idx(%d)", i);
+ break;
+ }
+ }
+
+ /* error handling */
+ if (0 != rc) {
+ /* unlink the streams first */
+ for (j = 0; j < num_streams_to_start; j++) {
+ if (s_objs[j]->ch_obj != my_obj) {
+ pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
+ s_objs[j]->linked_stream->is_linked = 0;
+ s_objs[j]->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);
+
+ if (TRUE == my_obj->bundle.is_active) {
+ mm_channel_flush_super_buf_queue(my_obj, 0,
+ s_objs[i]->stream_info->stream_type);
+ }
+ memset(s_objs[j], 0, sizeof(mm_stream_t));
+ continue;
+ }
+ }
+
+ for (j = 0; j <= i; j++) {
+ if ((NULL == s_objs[j]) || (s_objs[j]->ch_obj != my_obj)) {
+ continue;
+ }
+ /* stop streams*/
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf */
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+ }
+
+ /* destroy super buf cmd thread */
+ if (TRUE == my_obj->bundle.is_active) {
+ /* first stop bundle thread */
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+ /* deinit superbuf queue */
+ mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+ /* memset super buffer queue info */
+ my_obj->bundle.is_active = 0;
+ memset(&my_obj->bundle.superbuf_queue, 0, sizeof(mm_channel_queue_t));
+ }
+ }
+ my_obj->bWaitForPrepSnapshotDone = 0;
+ if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ LOGH("registering Channel obj %p", my_obj);
+ mm_frame_sync_register_channel(my_obj);
+ }
+ return rc;
+}
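+
+/* Illustrative sketch (not part of this driver): the per-stream start sequence
+ * that mm_channel_start() above applies to every stream it owns. `s` is
+ * assumed to be an acquired and configured stream object.
+ *
+ *     int32_t rc = mm_stream_fsm_fn(s, MM_STREAM_EVT_GET_BUF, NULL, NULL);
+ *     if (rc == 0) {
+ *         rc = mm_stream_fsm_fn(s, MM_STREAM_EVT_REG_BUF, NULL, NULL);
+ *     }
+ *     if (rc == 0) {
+ *         rc = mm_stream_fsm_fn(s, MM_STREAM_EVT_START, NULL, NULL);
+ *     }
+ *     // on any failure mm_channel_start() rolls back via STOP/UNREG_BUF/PUT_BUF
+ */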
+
+/*===========================================================================
+ * FUNCTION : mm_channel_stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ int i;
+ mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+ uint8_t num_streams_to_stop = 0;
+ mm_stream_t *s_obj = NULL;
+ int meta_stream_idx = 0;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ mm_frame_sync_unregister_channel(my_obj);
+ }
+
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj == my_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ /* remember meta data stream index */
+ if (stream_type == CAM_STREAM_TYPE_METADATA) {
+ meta_stream_idx = num_streams_to_stop;
+ }
+ }
+ s_objs[num_streams_to_stop++] = s_obj;
+ }
+ }
+ }
+
+ if (meta_stream_idx < num_streams_to_stop - 1 ) {
+ /* always stop meta data stream last, so switch the stream object with the last one */
+ s_obj = s_objs[num_streams_to_stop - 1];
+ s_objs[num_streams_to_stop - 1] = s_objs[meta_stream_idx];
+ s_objs[meta_stream_idx] = s_obj;
+ }
+
+ for (i = 0; i < num_streams_to_stop; i++) {
+ /* stream that are linked to this channel should not be stopped */
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+
+ /* stream off */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf at kernel */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+ }
+
+ for (i = 0; i < num_streams_to_stop; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ /* Only unlink stream */
+ pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+ s_objs[i]->linked_stream->is_linked = 0;
+ s_objs[i]->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+ }
+ }
+
+ /* destroy super buf cmd thread */
+ if (TRUE == my_obj->bundle.is_active) {
+ mm_channel_flush_super_buf_queue(my_obj, 0, CAM_STREAM_TYPE_DEFAULT);
+ /* first stop bundle thread */
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+ /* deinit superbuf queue */
+ mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+ /* reset few fields in the bundle info */
+ my_obj->bundle.is_active = 0;
+ my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+ my_obj->bundle.superbuf_queue.good_frame_id = 0;
+ my_obj->bundle.superbuf_queue.match_cnt = 0;
+ }
+
+ /* since all streams are stopped, we are safe to
+ * release all buffers allocated in stream */
+ for (i = 0; i < num_streams_to_stop; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+ }
+
+    for (i = 0; i < num_streams_to_stop; i++) {
+        if (s_objs[i]->ch_obj != my_obj) {
+            memset(s_objs[i], 0, sizeof(mm_stream_t));
+        }
+    }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ * frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @buf : ptr to request info (number of matched and retro frames needed)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+ mm_camera_req_buf_t *buf)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ if(!buf) {
+ LOGE("Request info buf is NULL");
+ return -1;
+ }
+
+ /* set pending_cnt
+ * will trigger dispatching super frames if pending_cnt > 0 */
+ /* send cam_sem_post to wake up cmd thread to dispatch super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+ node->u.req_buf = *buf;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
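+
+/* Illustrative sketch (not part of this driver): requesting a single matched
+ * super buffer, as a ZSL snapshot path would. `ch` is assumed to be a started,
+ * bundled channel.
+ *
+ *     mm_camera_req_buf_t req;
+ *     memset(&req, 0, sizeof(req));
+ *     req.type = MM_CAMERA_REQ_SUPER_BUF;
+ *     req.num_buf_requested = 1;
+ *     if (mm_channel_request_super_buf(ch, &req) != 0) {
+ *         LOGE("super buf request failed");
+ *     }
+ */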
+
+/*===========================================================================
+ * FUNCTION : mm_channel_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain
+ * number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ /* reset pending_cnt */
+ mm_camera_req_buf_t buf;
+ memset(&buf, 0x0, sizeof(buf));
+ buf.type = MM_CAMERA_REQ_SUPER_BUF;
+ buf.num_buf_requested = 0;
+ rc = mm_channel_request_super_buf(my_obj, &buf);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @frame_idx : frame idx until which to flush all superbufs
+ * @stream_type : flush only superbufs of this stream type
+ *                (CAM_STREAM_TYPE_DEFAULT flushes all stream types)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj, uint32_t frame_idx,
+ cam_stream_type_t stream_type)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+ node->u.flush_cmd.frame_idx = frame_idx;
+ node->u.flush_cmd.stream_type = stream_type;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+
+ /* wait for ack from cmd thread */
+ cam_sem_wait(&(my_obj->cmd_thread.sync_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
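+
+/* Illustrative sketch (not part of this driver): discarding every pending
+ * super buffer of a channel regardless of stream type. Per the @frame_idx
+ * description above, a non-zero index flushes only superbufs up to that frame.
+ *
+ *     if (mm_channel_flush_super_buf_queue(ch, 0, CAM_STREAM_TYPE_DEFAULT) != 0) {
+ *         LOGE("flush of superbuf queue failed");
+ *     }
+ */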
+
+/*===========================================================================
+ * FUNCTION : mm_channel_config_notify_mode
+ *
+ * DESCRIPTION: configure notification mode
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->u.notify_mode = notify_mode;
+ node->cmd_type = MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_START_ZSL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_STOP_ZSL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* Redirect to linked stream */
+ rc = mm_stream_fsm_fn(s_obj->linked_stream,
+ MM_STREAM_EVT_QBUF,
+ (void *)buf,
+ NULL);
+ } else {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_QBUF,
+ (void *)buf,
+ NULL);
+ }
+ }
+
+ return rc;
+}
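+
+/* Illustrative sketch (not part of this driver): returning a consumed buffer
+ * to the kernel queue. `buf` is assumed to be an mm_camera_buf_def_t that was
+ * previously delivered to the caller.
+ *
+ *     if (mm_channel_qbuf(ch, buf) != 0) {
+ *         LOGE("failed to requeue buffer for stream %d", buf->stream_id);
+ *     }
+ */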
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream handle
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj, uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* Redirect to linked stream */
+ rc = mm_stream_fsm_fn(s_obj->linked_stream,
+ MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+ NULL,
+ NULL);
+ } else {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+ NULL,
+ NULL);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_set_stream_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_SET_PARM,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_stream_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be retrieved from the server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_GET_PARM,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_do_stream_action
+ *
+ * DESCRIPTION: request server to perform a stream-based action. May be removed
+ * later if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @actions : ptr to an action struct buf to be performed by server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Actions to be performed by server are already
+ * filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+ mm_evt_paylod_do_stream_action_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_DO_ACTION,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to payload for mapping
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+ cam_buf_map_type *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_map_buf(s_obj,
+ payload->type,
+ payload->frame_idx,
+ payload->plane_idx,
+ payload->fd,
+ payload->size);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to payload for mapping
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+ cam_buf_map_type_list *payload)
+{
+ int32_t rc = -1;
+ if ((payload == NULL) || (payload->length == 0)) {
+ return rc;
+ }
+
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->buf_maps[0].stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_map_bufs(s_obj, payload);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to unmap payload
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+ cam_buf_unmap_type *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_unmap_buf(s_obj, payload->type,
+ payload->frame_idx, payload->plane_idx);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_queue_init
+ *
+ * DESCRIPTION: initialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ * @queue : ptr to superbuf queue to be initialized
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+ return cam_queue_init(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_queue_deinit
+ *
+ * DESCRIPTION: deinitialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ * @queue : ptr to superbuf queue to be deinitialized
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+ return cam_queue_deinit(&queue->que);
+}
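+
+/* Illustrative sketch (not part of this driver): the init/deinit pairing used
+ * by mm_channel_start() and mm_channel_stop() above for the bundle queue.
+ *
+ *     mm_channel_queue_t *q = &ch->bundle.superbuf_queue;
+ *     if (mm_channel_superbuf_queue_init(q) == 0) {
+ *         // ... bundle streams and run ...
+ *         mm_channel_superbuf_queue_deinit(q);
+ *     }
+ */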
+
+/*===========================================================================
+ * FUNCTION : mm_channel_util_seq_comp_w_rollover
+ *
+ * DESCRIPTION: utility function to handle sequence number comparison with rollover
+ *
+ * PARAMETERS :
+ * @v1 : first value to be compared
+ * @v2 : second value to be compared
+ *
+ * RETURN : int8_t type of comparison result
+ * >0 -- v1 larger than v2
+ * =0 -- v1 equal to v2
+ * <0 -- v1 smaller than v2
+ *==========================================================================*/
+int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
+ uint32_t v2)
+{
+ int8_t ret = 0;
+
+    /* TODO: need to handle the case where v2 rolls over to 0 */
+ if (v1 > v2) {
+ ret = 1;
+ } else if (v1 < v2) {
+ ret = -1;
+ }
+
+ return ret;
+}
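+
+/* One possible way to address the TODO above -- a sketch only, not what this
+ * driver currently does. Interpreting the unsigned difference as signed makes
+ * a counter that has just wrapped past 0 still compare as newer. The helper
+ * name is hypothetical.
+ *
+ *     static int8_t seq_comp_signed_diff(uint32_t v1, uint32_t v2)
+ *     {
+ *         int32_t diff = (int32_t)(v1 - v2);   // wraps modulo 2^32
+ *         return (diff > 0) ? 1 : ((diff < 0) ? -1 : 0);
+ *     }
+ */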
+
+/*===========================================================================
+ * FUNCTION : mm_channel_validate_super_buf.
+ *
+ * DESCRIPTION: Validate incoming buffer with existing super buffer.
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int8_t type of validation result
+ * >0 -- Valid frame
+ * =0 -- Cannot validate
+ * <0 -- Invalid frame. Can be freed
+ *==========================================================================*/
+int8_t mm_channel_validate_super_buf(mm_channel_t* ch_obj,
+ mm_channel_queue_t *queue, mm_camera_buf_info_t *buf_info)
+{
+ int8_t ret = 0;
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ /* comp */
+ pthread_mutex_lock(&queue->que.lock);
+ head = &queue->que.head.list;
+    /* walk the queue looking for a superbuf that still expects this frame */
+    pos = head->next;
+    while (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if ((NULL != super_buf) &&
+                (super_buf->expected_frame) &&
+                (buf_info->frame_idx == super_buf->frame_idx)) {
+            /* This is a good frame. More frames are expected, so keep it. */
+            ret = 1;
+            break;
+        }
+        /* advance even if the node data is NULL so the loop cannot spin forever */
+        pos = pos->next;
+    }
+ pthread_mutex_unlock(&queue->que.lock);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_handle_metadata
+ *
+ * DESCRIPTION: Handle frame matching logic change due to metadata
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_handle_metadata(
+ mm_channel_t* ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf_info)
+{
+
+ int rc = 0 ;
+ mm_stream_t* stream_obj = NULL;
+ stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+ buf_info->stream_id);
+ uint8_t is_prep_snapshot_done_valid = 0;
+ uint8_t is_good_frame_idx_range_valid = 0;
+ int32_t prep_snapshot_done_state = 0;
+ cam_frame_idx_range_t good_frame_idx_range;
+ uint8_t is_crop_1x_found = 0;
+ uint32_t snapshot_stream_id = 0;
+ uint32_t i;
+ /* Set expected frame id to a future frame idx, large enough to wait
+ * for good_frame_idx_range, and small enough to still capture an image */
+ const uint32_t max_future_frame_offset = MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+
+ memset(&good_frame_idx_range, 0, sizeof(good_frame_idx_range));
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d",
+ buf_info->stream_id);
+ rc = -1;
+ goto end;
+ }
+ if (NULL == stream_obj->stream_info) {
+ LOGE("NULL stream info for stream_id = %d",
+ buf_info->stream_id);
+ rc = -1;
+ goto end;
+ }
+
+ if ((CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) &&
+ ((stream_obj->ch_obj == ch_obj) ||
+ ((stream_obj->linked_stream != NULL) &&
+ (stream_obj->linked_stream->linked_obj == ch_obj)))) {
+ const metadata_buffer_t *metadata;
+ metadata = (const metadata_buffer_t *)buf_info->buf->buffer;
+
+ if (NULL == metadata) {
+ LOGE("NULL metadata buffer for metadata stream");
+ rc = -1;
+ goto end;
+ }
+ LOGL("E , expected frame id: %d", queue->expected_frame_id);
+
+ IF_META_AVAILABLE(const int32_t, p_prep_snapshot_done_state,
+ CAM_INTF_META_PREP_SNAPSHOT_DONE, metadata) {
+ prep_snapshot_done_state = *p_prep_snapshot_done_state;
+ is_prep_snapshot_done_valid = 1;
+ LOGH("prepare snapshot done valid ");
+ }
+ IF_META_AVAILABLE(const cam_frame_idx_range_t, p_good_frame_idx_range,
+ CAM_INTF_META_GOOD_FRAME_IDX_RANGE, metadata) {
+ good_frame_idx_range = *p_good_frame_idx_range;
+ is_good_frame_idx_range_valid = 1;
+ LOGH("good_frame_idx_range : min: %d, max: %d , num frames = %d",
+ good_frame_idx_range.min_frame_idx,
+ good_frame_idx_range.max_frame_idx, good_frame_idx_range.num_led_on_frames);
+ }
+ IF_META_AVAILABLE(const cam_crop_data_t, p_crop_data,
+ CAM_INTF_META_CROP_DATA, metadata) {
+ cam_crop_data_t crop_data = *p_crop_data;
+
+ for (i = 0; i < ARRAY_SIZE(ch_obj->streams); i++) {
+ if (MM_STREAM_STATE_NOTUSED == ch_obj->streams[i].state) {
+ continue;
+ }
+ if (CAM_STREAM_TYPE_SNAPSHOT ==
+ ch_obj->streams[i].stream_info->stream_type) {
+ snapshot_stream_id = ch_obj->streams[i].server_stream_id;
+ break;
+ }
+ }
+
+ for (i=0; i<crop_data.num_of_streams; i++) {
+ if (snapshot_stream_id == crop_data.crop_info[i].stream_id) {
+ if (!crop_data.crop_info[i].crop.left &&
+ !crop_data.crop_info[i].crop.top) {
+ is_crop_1x_found = 1;
+ break;
+ }
+ }
+ }
+ }
+
+ IF_META_AVAILABLE(const cam_buf_divert_info_t, p_divert_info,
+ CAM_INTF_BUF_DIVERT_INFO, metadata) {
+ cam_buf_divert_info_t divert_info = *p_divert_info;
+ if (divert_info.frame_id >= buf_info->frame_idx) {
+ ch_obj->diverted_frame_id = divert_info.frame_id;
+ } else {
+ ch_obj->diverted_frame_id = 0;
+ }
+ }
+
+ if (ch_obj->isZoom1xFrameRequested) {
+ if (is_crop_1x_found) {
+ ch_obj->isZoom1xFrameRequested = 0;
+ queue->expected_frame_id = buf_info->frame_idx + 1;
+ } else {
+ queue->expected_frame_id += max_future_frame_offset;
+ /* Flush unwanted frames */
+ mm_channel_superbuf_flush_matched(ch_obj, queue);
+ }
+ goto end;
+ }
+
+ if (ch_obj->startZSlSnapshotCalled && is_good_frame_idx_range_valid) {
+ LOGI("frameID = %d, expected = %d good_frame_idx = %d",
+ buf_info->frame_idx, queue->expected_frame_id,
+ good_frame_idx_range.min_frame_idx);
+ }
+
+ if (is_prep_snapshot_done_valid) {
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ if (prep_snapshot_done_state == NEED_FUTURE_FRAME) {
+ queue->expected_frame_id += max_future_frame_offset;
+ LOGI("PreFlash Done. Need Main Flash");
+
+ mm_channel_superbuf_flush(ch_obj,
+ queue, CAM_STREAM_TYPE_DEFAULT);
+
+ ch_obj->needLEDFlash = TRUE;
+ } else {
+ ch_obj->needLEDFlash = FALSE;
+ }
+ }
+ if (is_good_frame_idx_range_valid) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ queue->good_frame_id = good_frame_idx_range.min_frame_idx;
+ if((ch_obj->needLEDFlash == TRUE) && (ch_obj->burstSnapNum > 1)) {
+ queue->led_on_start_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ queue->led_off_start_frame_id =
+ good_frame_idx_range.max_frame_idx;
+ queue->once = 0;
+ queue->led_on_num_frames =
+ good_frame_idx_range.num_led_on_frames;
+ queue->frame_skip_count = good_frame_idx_range.frame_skip_count;
+ LOGD("Need Flash, expected frame id = %d,"
+ " led_on start = %d, led off start = %d, led on frames = %d ",
+ queue->expected_frame_id, queue->led_on_start_frame_id,
+ queue->led_off_start_frame_id, queue->led_on_num_frames);
+ } else {
+ LOGD("No flash, expected frame id = %d ",
+ queue->expected_frame_id);
+ }
+ } else if ((MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX == ch_obj->bracketingState) &&
+ !is_prep_snapshot_done_valid) {
+ /* Flush unwanted frames */
+ mm_channel_superbuf_flush_matched(ch_obj, queue);
+ queue->expected_frame_id += max_future_frame_offset;
+ }
+ if (ch_obj->isFlashBracketingEnabled &&
+ is_good_frame_idx_range_valid) {
+            /* Flash bracketing needs two frames, with & without LED flash.
+             * In the valid range, the min frame is with LED flash and the
+             * max frame is without LED flash */
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ /* max frame is without led flash */
+ queue->expected_frame_id_without_led =
+ good_frame_idx_range.max_frame_idx;
+ queue->good_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ } else if (is_good_frame_idx_range_valid) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_ACTIVE;
+ queue->good_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ }
+
+ if (ch_obj->isConfigCapture && is_good_frame_idx_range_valid
+ && (good_frame_idx_range.config_batch_idx < ch_obj->frameConfig.num_batch)) {
+
+ LOGI("Frame Config: Expcted ID = %d batch index = %d",
+ good_frame_idx_range.min_frame_idx, good_frame_idx_range.config_batch_idx);
+ ch_obj->capture_frame_id[good_frame_idx_range.config_batch_idx] =
+ good_frame_idx_range.min_frame_idx;
+
+ if (ch_obj->cur_capture_idx == good_frame_idx_range.config_batch_idx) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ } else {
+ queue->expected_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ }
+ queue->good_frame_id = queue->expected_frame_id;
+ }
+
+ if ((ch_obj->burstSnapNum > 1) && (ch_obj->needLEDFlash == TRUE)
+ && !ch_obj->isFlashBracketingEnabled
+ && (MM_CHANNEL_BRACKETING_STATE_OFF == ch_obj->bracketingState)
+ && !ch_obj->isConfigCapture) {
+ if((buf_info->frame_idx >= queue->led_off_start_frame_id)
+ && !queue->once) {
+ LOGD("Burst snap num = %d ",
+ ch_obj->burstSnapNum);
+ // Skip frames from LED OFF frame to get a good frame
+ queue->expected_frame_id = queue->led_off_start_frame_id +
+ queue->frame_skip_count;
+ queue->once = 1;
+ ch_obj->stopZslSnapshot = 1;
+ ch_obj->needLEDFlash = FALSE;
+ LOGD("Reached max led on frames = %d , expected id = %d",
+ buf_info->frame_idx, queue->expected_frame_id);
+ }
+ }
+
+ IF_META_AVAILABLE(const cam_low_light_mode_t, low_light_level,
+ CAM_INTF_META_LOW_LIGHT, metadata) {
+ ch_obj->needLowLightZSL = *low_light_level;
+ }
+
+ // For the instant capture case, if AEC settles before expected frame ID from user,
+ // reset the expected frame ID to current frame index.
+ if (queue->attr.user_expected_frame_id > 0) {
+ if (queue->attr.user_expected_frame_id > buf_info->frame_idx) {
+ IF_META_AVAILABLE(const cam_3a_params_t, ae_params,
+ CAM_INTF_META_AEC_INFO, metadata) {
+ if (ae_params->settled) {
+ queue->expected_frame_id = buf_info->frame_idx;
+ // Reset the expected frame ID from HAL to 0
+ queue->attr.user_expected_frame_id = 0;
+ LOGD("AEC settled, reset expected frame ID from user");
+ }
+ }
+ } else {
+ // Reset the expected frame ID from HAL to 0 after
+ // current frame index is greater than expected id.
+ queue->attr.user_expected_frame_id = 0;
+ LOGD("reset expected frame ID from user as it reached the bound");
+ }
+ }
+ }
+end:
+ return rc;
+}
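+
+/* Illustrative sketch (not part of this driver): the IF_META_AVAILABLE()
+ * pattern used throughout mm_channel_handle_metadata() above to read a single
+ * entry from a metadata buffer. `metadata` is assumed to point to a valid
+ * metadata_buffer_t.
+ *
+ *     IF_META_AVAILABLE(const cam_3a_params_t, aec, CAM_INTF_META_AEC_INFO,
+ *             metadata) {
+ *         if (aec->settled) {
+ *             LOGD("AEC has settled");
+ *         }
+ *     }
+ */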
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_comp_and_enqueue
+ *
+ * DESCRIPTION: implementation for matching logic for superbuf
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_comp_and_enqueue(
+ mm_channel_t* ch_obj,
+ mm_channel_queue_t *queue,
+ mm_camera_buf_info_t *buf_info)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+ uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
+ struct cam_list *last_buf, *insert_before_buf, *last_buf_ptr;
+
+ LOGD("E");
+
+ for (buf_s_idx = 0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+ if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+ break;
+ }
+ }
+
+ if (buf_s_idx == queue->num_streams) {
+ LOGE("buf from stream (%d) not bundled", buf_info->stream_id);
+ return -1;
+ }
+
+ if(buf_info->frame_idx == 0) {
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ if (mm_channel_handle_metadata(ch_obj, queue, buf_info) < 0) {
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return -1;
+ }
+
+ if ((mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+ queue->expected_frame_id) < 0) &&
+ (mm_channel_validate_super_buf(ch_obj, queue, buf_info) <= 0)) {
+ LOGH("incoming buf id(%d) is older than expected buf id(%d), will discard it",
+ buf_info->frame_idx, queue->expected_frame_id);
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ /* comp */
+ pthread_mutex_lock(&queue->que.lock);
+ head = &queue->que.head.list;
+    /* walk the queue looking for an unmatched superbuf for this frame */
+ pos = head->next;
+
+ found_super_buf = 0;
+ unmatched_bundles = 0;
+ last_buf = NULL;
+ insert_before_buf = NULL;
+ last_buf_ptr = NULL;
+
+ while (pos != head) {
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+
+ if (NULL != super_buf) {
+ if (super_buf->matched) {
+ /* find a matched super buf, move to next one */
+ pos = pos->next;
+ continue;
+ } else if (( buf_info->frame_idx == super_buf->frame_idx )
+ /*Pick metadata greater than available frameID*/
+ || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+ && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)
+ && (super_buf->frame_idx < buf_info->frame_idx))
+ /*Pick available metadata closest to frameID*/
+ || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)
+ && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+ && (super_buf->unmatched_meta_idx > buf_info->frame_idx))){
+                /* Bundle when the super buffer frame IDs match, OR (in low
+                   priority bundling) when the metadata frame ID is greater
+                   than the available super buffer frame ID, OR when this is
+                   the available metadata closest to the incoming frame ID */
+ found_super_buf = 1;
+ /* If we are filling into a 'meta only' superbuf, make sure to reset
+ the super_buf frame_idx so that missing streams in this superbuf
+ are filled as per matching frame id logic. Note that, in low priority
+ queue, only meta frame id need not match (closest suffices) but
+ the other streams in this superbuf should have same frame id. */
+ if (super_buf->unmatched_meta_idx > 0) {
+ super_buf->unmatched_meta_idx = 0;
+ super_buf->frame_idx = buf_info->frame_idx;
+ }
+ break;
+ } else {
+ unmatched_bundles++;
+ if ( NULL == last_buf ) {
+ if ( super_buf->frame_idx < buf_info->frame_idx ) {
+ last_buf = pos;
+ }
+ }
+ if ( NULL == insert_before_buf ) {
+ if ( super_buf->frame_idx > buf_info->frame_idx ) {
+ insert_before_buf = pos;
+ }
+ }
+ pos = pos->next;
+ }
+ }
+ }
+
+ if ( found_super_buf ) {
+ if(super_buf->super_buf[buf_s_idx].frame_idx != 0) {
+ //This can cause frame drop. We are overwriting same memory.
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGW("Warning: frame is already in camera ZSL queue");
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ /*Insert incoming buffer to super buffer*/
+ super_buf->super_buf[buf_s_idx] = *buf_info;
+
+ /* check if superbuf is all matched */
+ super_buf->matched = 1;
+ for (i=0; i < super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx == 0) {
+ super_buf->matched = 0;
+ break;
+ }
+ }
+
+ if (super_buf->matched) {
+ if(ch_obj->isFlashBracketingEnabled) {
+ queue->expected_frame_id =
+ queue->expected_frame_id_without_led;
+ if (buf_info->frame_idx >=
+ queue->expected_frame_id_without_led) {
+ ch_obj->isFlashBracketingEnabled = FALSE;
+ }
+ } else {
+ queue->expected_frame_id = buf_info->frame_idx
+ + queue->attr.post_frame_skip;
+ }
+
+ super_buf->expected_frame = FALSE;
+
+ LOGD("curr = %d, skip = %d , Expected Frame ID: %d",
+ buf_info->frame_idx,
+ queue->attr.post_frame_skip, queue->expected_frame_id);
+
+ queue->match_cnt++;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ /* Any older unmatched buffer need to be released */
+ if ( last_buf ) {
+ while ( last_buf != pos ) {
+ node = member_of(last_buf, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ last_buf = last_buf->next;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ } else {
+ LOGE("Invalid superbuf in queue!");
+ break;
+ }
+ }
+ }
+ }else {
+ if (ch_obj->diverted_frame_id == buf_info->frame_idx) {
+ super_buf->expected_frame = TRUE;
+ ch_obj->diverted_frame_id = 0;
+ }
+ }
+ } else {
+ if ((queue->attr.max_unmatched_frames < unmatched_bundles)
+ && ( NULL == last_buf )) {
+ /* incoming frame is older than the last bundled one */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ } else {
+ last_buf_ptr = last_buf;
+
+ /* Loop to remove unmatched frames */
+ while ((queue->attr.max_unmatched_frames < unmatched_bundles)
+ && (last_buf_ptr != NULL && last_buf_ptr != pos)) {
+ node = member_of(last_buf_ptr, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf && super_buf->expected_frame == FALSE
+ && (&node->list != insert_before_buf)) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ unmatched_bundles--;
+ }
+ last_buf_ptr = last_buf_ptr->next;
+ }
+
+ if (queue->attr.max_unmatched_frames < unmatched_bundles) {
+ node = member_of(last_buf, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ }
+
+ /* insert the new frame at the appropriate position. */
+
+ mm_channel_queue_node_t *new_buf = NULL;
+ cam_node_t* new_node = NULL;
+
+ new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+ new_node = (cam_node_t*)malloc(sizeof(cam_node_t));
+ if (NULL != new_buf && NULL != new_node) {
+ memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+ memset(new_node, 0, sizeof(cam_node_t));
+ new_node->data = (void *)new_buf;
+ new_buf->num_of_bufs = queue->num_streams;
+ new_buf->super_buf[buf_s_idx] = *buf_info;
+ new_buf->frame_idx = buf_info->frame_idx;
+
+ if ((ch_obj->diverted_frame_id == buf_info->frame_idx)
+ || (buf_info->frame_idx == queue->good_frame_id)) {
+ new_buf->expected_frame = TRUE;
+ ch_obj->diverted_frame_id = 0;
+ }
+
+ /* enqueue */
+ if ( insert_before_buf ) {
+ cam_list_insert_before_node(&new_node->list, insert_before_buf);
+ } else {
+ cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+ }
+ queue->que.size++;
+
+ if(queue->num_streams == 1) {
+ new_buf->matched = 1;
+ new_buf->expected_frame = FALSE;
+ queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+ queue->match_cnt++;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ }
+ /* In low priority queue, this will become a 'meta only' superbuf. Set the
+ unmatched_frame_idx so that the upcoming stream buffers (other than meta)
+ can be filled into this which are nearest to this idx. */
+ if ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)) {
+ new_buf->unmatched_meta_idx = buf_info->frame_idx;
+ }
+ } else {
+ /* No memory */
+ if (NULL != new_buf) {
+ free(new_buf);
+ }
+ if (NULL != new_node) {
+ free(new_node);
+ }
+ /* qbuf the new buf since we cannot enqueue */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGD("X");
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @matched_only : if dequeued buf should be matched
+ * @ch_obj : channel object
+ *
+ * RETURN : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(
+ mm_channel_queue_t * queue,
+ uint8_t matched_only, mm_channel_t *ch_obj)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ head = &queue->que.head.list;
+ pos = head->next;
+ if (pos != head) {
+ /* get the first node */
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if ( (NULL != super_buf) &&
+ (matched_only == TRUE) &&
+ (super_buf->matched == FALSE) ) {
+ /* require to dequeue matched frame only, but this superbuf is not matched,
+ simply set return ptr to NULL */
+ super_buf = NULL;
+ }
+ if (NULL != super_buf) {
+ /* remove from the queue */
+ cam_list_del_node(&node->list);
+ queue->que.size--;
+ if (super_buf->matched == TRUE) {
+ queue->match_cnt--;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_remove(super_buf->frame_idx);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ }
+ free(node);
+ }
+ }
+
+ return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue_frame_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue based on frame index
+ * from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @frame_idx : frame index to be dequeued
+ *
+ * RETURN : ptr to a node from superbuf queue with matched frame index
+ * : NULL if not found
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+ mm_channel_queue_t * queue, uint32_t frame_idx)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ if (!queue) {
+ LOGE("queue is NULL");
+ return NULL;
+ }
+
+ head = &queue->que.head.list;
+ pos = head->next;
+ LOGL("Searching for match frame %d", frame_idx);
+ while ((pos != head) && (pos != NULL)) {
+ /* get the first node */
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (super_buf && super_buf->matched &&
+ (super_buf->frame_idx == frame_idx)) {
+ /* remove from the queue */
+ cam_list_del_node(&node->list);
+ queue->que.size--;
+ queue->match_cnt--;
+ LOGH("Found match frame %d", frame_idx);
+ free(node);
+ break;
+ }
+ else {
+ LOGH("match frame not found %d", frame_idx);
+ super_buf = NULL;
+ }
+ pos = pos->next;
+ }
+ return super_buf;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue
+ *
+ * DESCRIPTION: dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @ch_obj : channel object
+ *
+ * RETURN : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+ mm_channel_queue_t * queue, mm_channel_t *ch_obj)
+{
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, ch_obj);
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return super_buf;
+}
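+
+/* Illustrative sketch (not part of this driver): draining matched super
+ * buffers and returning their stream buffers, following the same pattern as
+ * the flush helpers below. A real dispatcher would hand each superbuf to the
+ * registered notify callback instead of requeueing it immediately.
+ *
+ *     mm_channel_queue_node_t *sb;
+ *     while ((sb = mm_channel_superbuf_dequeue(&ch->bundle.superbuf_queue, ch)) != NULL) {
+ *         uint8_t i;
+ *         for (i = 0; i < sb->num_of_bufs; i++) {
+ *             if (sb->super_buf[i].buf != NULL) {
+ *                 mm_channel_qbuf(ch, sb->super_buf[i].buf);
+ *             }
+ *         }
+ *         free(sb);
+ *     }
+ */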
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_bufdone_overflow
+ *
+ * DESCRIPTION: keep superbuf queue no larger than watermark set by upper layer
+ * via channel attribute
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no overflow is needed */
+ return 0;
+ }
+
+ LOGD("before match_cnt=%d, water_mark=%d",
+ queue->match_cnt, queue->attr.water_mark);
+ /* bufdone overflowed bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.water_mark) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGD("after match_cnt=%d, water_mark=%d",
+ queue->match_cnt, queue->attr.water_mark);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_skip
+ *
+ * DESCRIPTION: depending on the look-back configuration of the channel
+ * attributes, unwanted superbufs will be removed from the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no skip is needed */
+ return 0;
+ }
+
+ /* bufdone overflowed bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.look_back) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_flush
+ *
+ * DESCRIPTION: flush the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ * @cam_type: flush only particular type (default flushes all)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue, cam_stream_type_t cam_type)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ /* bufdone bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE, my_obj);
+ while (super_buf != NULL) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ stream_type = super_buf->super_buf[i].buf->stream_type;
+ if ((CAM_STREAM_TYPE_DEFAULT == cam_type) ||
+ (cam_type == stream_type)) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ }
+ free(super_buf);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE, my_obj);
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_proc_general_cmd
+ *
+ * DESCRIPTION: process general command
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @p_gen_cmd : ptr to the general command to be processed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+ mm_camera_generic_cmd_t *p_gen_cmd)
+{
+ LOGD("E");
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->u.gen_cmd = *p_gen_cmd;
+ node->cmd_type = MM_CAMERA_CMD_TYPE_GENERAL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+ LOGD("X");
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_flush_matched
+ *
+ * DESCRIPTION: flush matched buffers from the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ /* bufdone bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ while (super_buf != NULL) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_reset
+ *
+ * DESCRIPTION: Reset Frame sync info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_reset() {
+ memset(&fs, 0x0, sizeof(fs));
+ LOGD("Reset Done");
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_register_channel
+ *
+ * DESCRIPTION: Register Channel for frame sync
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj) {
+ // Lock frame sync info
+ pthread_mutex_lock(&fs_lock);
+ if ((fs.num_cam >= MAX_NUM_CAMERA_PER_BUNDLE) || (!ch_obj)) {
+ LOGE("Error!! num cam(%d) is out of range ",
+ fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ if (fs.num_cam == 0) {
+ LOGH("First channel registering!!");
+ mm_frame_sync_reset();
+ }
+ uint8_t i = 0;
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == NULL) {
+ fs.ch_obj[i] = ch_obj;
+ fs.cb[i] = ch_obj->bundle.super_buf_notify_cb;
+ fs.num_cam++;
+ LOGD("DBG_FS index %d", i);
+ break;
+ }
+ }
+ if (i >= MAX_NUM_CAMERA_PER_BUNDLE) {
+ LOGH("X, DBG_FS Cannot register channel!!");
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ LOGH("num_cam %d ", fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_unregister_channel
+ *
+ * DESCRIPTION: un-register Channel for frame sync
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj) {
+ uint8_t i = 0;
+ // Lock frame sync info
+ pthread_mutex_lock(&fs_lock);
+ if (!fs.num_cam || !ch_obj) {
+ LOGH("X, DBG_FS: channel not found !!");
+ // Unlock frame sync info
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == ch_obj) {
+ LOGD("found ch_obj at i (%d) ", i);
+ break;
+ }
+ }
+ if (i < MAX_NUM_CAMERA_PER_BUNDLE) {
+ LOGD("remove channel info ");
+ fs.ch_obj[i] = NULL;
+ fs.cb[i] = NULL;
+ fs.num_cam--;
+ } else {
+ LOGD("DBG_FS Channel not found ");
+ }
+ if (fs.num_cam == 0) {
+ mm_frame_sync_reset();
+ }
+ LOGH("X, fs.num_cam %d", fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_add
+ *
+ * DESCRIPTION: Add frame info into frame sync nodes
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be added
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj) {
+
+ LOGD("E, frame id %d ch_obj %p", frame_id, ch_obj);
+ if (!frame_id || !ch_obj) {
+ LOGH("X : Error, cannot add sync frame !!");
+ return -1;
+ }
+
+ int8_t ch_idx = -1;
+ uint8_t i = 0;
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == ch_obj) {
+ ch_idx = i;
+ LOGD("ch id %d ", ch_idx);
+ break;
+ }
+ }
+ if (ch_idx < 0) {
+ LOGH("X : DBG_FS ch not found!!");
+ return -1;
+ }
+ int8_t index = mm_frame_sync_find_frame_index(frame_id);
+ if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+ fs.node[index].frame_valid[ch_idx] = 1;
+ } else if (index < 0) {
+ if (fs.pos >= MM_CAMERA_FRAME_SYNC_NODES) {
+ fs.pos = 0;
+ }
+ index = fs.pos;
+ memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+ fs.pos++;
+ fs.node[index].frame_idx = frame_id;
+ fs.node[index].frame_valid[ch_idx] = 1;
+ if (fs.num_cam == 1) {
+ LOGD("Single camera frame %d , matched ", frame_id);
+ fs.node[index].matched = 1;
+ }
+ }
+ uint8_t frames_valid = 0;
+ if (!fs.node[index].matched) {
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.node[index].frame_valid[i]) {
+ frames_valid++;
+ }
+ }
+ if (frames_valid == fs.num_cam) {
+ fs.node[index].matched = 1;
+ LOGD("dual camera frame %d , matched ",
+ frame_id);
+ }
+ }
+ return 0;
+}
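+
+/* Worked example (illustrative only): assume two channels are registered,
+ * so fs.num_cam == 2. When channel A reports frame_id 100, no node exists
+ * yet, so one is allocated at fs.pos with frame_idx = 100 and
+ * frame_valid[A] = 1; matched stays 0 because only 1 of 2 frames is valid.
+ * When channel B later reports frame_id 100, the existing node is found,
+ * frame_valid[B] is set, frames_valid reaches fs.num_cam, and the node is
+ * marked matched, so mm_frame_sync_find_matched() can return frame 100.
+ */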
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_remove
+ *
+ * DESCRIPTION: Remove frame info from frame sync nodes
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be removed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_remove(uint32_t frame_id) {
+ int8_t index = -1;
+
+ LOGD("E, frame_id %d", frame_id);
+ if (!frame_id) {
+ LOGE("X, DBG_FS frame id invalid");
+ return -1;
+ }
+
+ index = mm_frame_sync_find_frame_index(frame_id);
+ if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+ LOGD("Removing sync frame %d", frame_id);
+ memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+ }
+ LOGD("X ");
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_find_matched
+ *
+ * DESCRIPTION: Find a matched sync frame from the node array
+ *
+ * PARAMETERS :
+ * @oldest : if set, find the oldest matched frame;
+ *           if not set, return the first matched frame found
+ *
+ * RETURN     : uint32_t type of frame index
+ *              0 -- if no matched frame is found
+ *              frame index -- if a matched frame is found
+ *==========================================================================*/
+uint32_t mm_frame_sync_find_matched(uint8_t oldest) {
+ LOGH("E, oldest %d ", oldest);
+ uint8_t i = 0;
+ uint32_t frame_idx = 0;
+ uint32_t curr_frame_idx = 0;
+ for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+ if (fs.node[i].matched) {
+ curr_frame_idx = fs.node[i].frame_idx;
+ if (!frame_idx) {
+ frame_idx = curr_frame_idx;
+ }
+ if (!oldest) {
+ break;
+ } else if (frame_idx > curr_frame_idx) {
+ frame_idx = curr_frame_idx;
+ }
+ }
+ }
+ LOGH("X, oldest %d frame idx %d", oldest, frame_idx);
+ return frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_find_frame_index
+ *
+ * DESCRIPTION: Find sync frame index if present
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be searched
+ *
+ * RETURN : int8_t type of status
+ * -1 -- If desired frame not found
+ * index: node array index if frame is found
+ *==========================================================================*/
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id) {
+
+ LOGD("E, frame_id %d", frame_id);
+ int8_t index = -1, i = 0;
+ for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+ if (fs.node[i].frame_idx == frame_id) {
+ index = i;
+ break;
+ }
+ }
+ LOGD("X index :%d", index);
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_lock_queues
+ *
+ * DESCRIPTION: Lock all channel queues present in node info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_lock_queues() {
+ uint8_t j = 0;
+ LOGD("E ");
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue) {
+ pthread_mutex_lock(&ch_queue->que.lock);
+ LOGL("Done locking fs.ch_obj[%d] ", j);
+ }
+ }
+ }
+ pthread_mutex_lock(&fs_lock);
+ LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_unlock_queues
+ *
+ * DESCRIPTION: Unlock all channel queues
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_unlock_queues() {
+ // Unlock all queues
+ uint8_t j = 0;
+ LOGD("E ");
+ pthread_mutex_unlock(&fs_lock);
+ LOGL("Done unlocking fs ");
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue) {
+ pthread_mutex_unlock(&ch_queue->que.lock);
+ LOGL("Done unlocking fs.ch_obj[%d] ", j);
+ }
+ }
+ }
+ LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_node_qbuf
+ *
+ * DESCRIPTION: qbuf all buffers in a node
+ *
+ * PARAMETERS :
+ * @ch_obj : Channel info
+ * @node : node to qbuf
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node) {
+ uint8_t i;
+ if (!ch_obj || !node) {
+ return;
+ }
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ return;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..167e7fe
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,2052 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <linux/media.h>
+#include <media/msm_cam_sensor.h>
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera_sock.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+#define CAM_SENSOR_FACING_MASK (1U<<16) // 16th (starting from 0) bit tells whether it is a BACK or FRONT camera
+#define CAM_SENSOR_TYPE_MASK (1U<<24) // 24th (starting from 0) bit tells whether it is a MAIN or AUX camera
+#define CAM_SENSOR_FORMAT_MASK (1U<<25) // 25th (starting from 0) bit tells whether it is a YUV sensor or not
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_generate_handler
+ *
+ * DESCRIPTION: utility function to generate handler for camera/channel/stream
+ *
+ * PARAMETERS :
+ * @index: index of the object to have handler
+ *
+ * RETURN : uint32_t type of handle that uniquely identify the object
+ *==========================================================================*/
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+ uint32_t handler = 0;
+ pthread_mutex_lock(&g_handler_lock);
+ g_handler_history_count++;
+ if (0 == g_handler_history_count) {
+ g_handler_history_count++;
+ }
+ handler = g_handler_history_count;
+ handler = (handler<<8) | index;
+ pthread_mutex_unlock(&g_handler_lock);
+ return handler;
+}
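+
+/* Worked example (illustrative only): if g_handler_history_count has
+ * advanced to 0x0003 and the object index is 2, the generated handle is
+ * (0x0003 << 8) | 0x02 = 0x0302. mm_camera_util_get_index_by_handler()
+ * recovers the index as (0x0302 & 0xFF) = 2, while the upper bits keep
+ * handles unique across repeated open/close cycles of the same index.
+ */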
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_index_by_handler
+ *
+ * DESCRIPTION: utility function to get index from handle
+ *
+ * PARAMETERS :
+ * @handler: object handle
+ *
+ * RETURN : uint8_t type of index derived from handle
+ *==========================================================================*/
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
+ return (handler&0x000000ff);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_dev_name
+ *
+ * DESCRIPTION: utility function to get device name from camera handle
+ *
+ * PARAMETERS :
+ * @cam_handle: camera handle
+ *
+ * RETURN : char ptr to the device name stored in global variable
+ * NOTE : caller should not free the char ptr
+ *==========================================================================*/
+const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
+{
+ char *dev_name = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+ if(cam_idx < MM_CAMERA_MAX_NUM_SENSORS) {
+ dev_name = g_cam_ctrl.video_dev_name[cam_idx];
+ }
+ return dev_name;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_camera_by_handler
+ *
+ * DESCRIPTION: utility function to get camera object from camera handle
+ *
+ * PARAMETERS :
+ * @cam_handle: camera handle
+ *
+ * RETURN : ptr to the camera object stored in global variable
+ * NOTE : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
+{
+ mm_camera_obj_t *cam_obj = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+
+ if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
+ (NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+ (cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+ cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+ }
+ return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_query_capability(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E: camera_handler = %d ", camera_handle);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_query_capability(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
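+
+/* Locking pattern used by the mm_camera_intf_* wrappers in this file
+ * (a sketch of the code above, not additional behavior): g_intf_lock guards
+ * the handle-to-object lookup, and the per-camera cam_lock is taken before
+ * g_intf_lock is dropped, so the object cannot be torn down in between.
+ * The mm_camera_* callee is expected to release cam_lock (not shown here):
+ *
+ *   pthread_mutex_lock(&g_intf_lock);
+ *   my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+ *   if (my_obj) {
+ *       pthread_mutex_lock(&my_obj->cam_lock);   // lock hand-off
+ *       pthread_mutex_unlock(&g_intf_lock);
+ *       rc = mm_camera_xxx(my_obj, ...);         // releases cam_lock
+ *   } else {
+ *       pthread_mutex_unlock(&g_intf_lock);
+ *   }
+ */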
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_parms(uint32_t camera_handle,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_parms(my_obj, parms);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @parms : ptr to a param struct to be retrieved from the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are
+ *              already filled in by the upper layer caller. After this call,
+ *              the corresponding fields of the requested parameters will be
+ *              filled in by the server with detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_parms(uint32_t camera_handle,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_parms(my_obj, parms);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will always assume there will
+ * be an auto_focus event following up.
+ *==========================================================================*/
+static int32_t mm_camera_intf_do_auto_focus(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_do_auto_focus(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_auto_focus(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_cancel_auto_focus(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @do_af_flag : flag indicating if AF is needed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_prepare_snapshot(uint32_t camera_handle,
+ int32_t do_af_flag)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_prepare_snapshot(my_obj, do_af_flag);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_flush(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_close
+ *
+ * DESCRIPTION: close a camera by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_close(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ uint8_t cam_idx = camera_handle & 0x00ff;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E: camera_handler = %d ", camera_handle);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if (my_obj){
+ my_obj->ref_count--;
+
+ if(my_obj->ref_count > 0) {
+ /* still have reference to obj, return here */
+ LOGD("ref_count=%d\n", my_obj->ref_count);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = 0;
+ } else {
+ /* need to close the camera here as there is no other reference;
+ * first clear g_cam_ctrl's reference to cam_obj */
+ g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_close(my_obj);
+ pthread_mutex_destroy(&my_obj->cam_lock);
+ free(my_obj);
+ }
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ return rc;
+}
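+
+/* Reference-count sketch (illustrative): if the same camera index is opened
+ * twice, the first close only drops ref_count from 2 to 1 and returns 0;
+ * the second close clears g_cam_ctrl.cam_obj[cam_idx], calls
+ * mm_camera_close(), destroys cam_lock and frees the object.
+ */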
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : uint32_t type of channel handle
+ * 0 -- invalid channel handle, meaning the op failed
+ * >0 -- successfully added a channel with a valid handle
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ *              no bundle matching logic will be performed in the channel.
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ uint32_t ch_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d", camera_handle);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X ch_id = %d", ch_id);
+ return ch_id;
+}
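+
+/* Hypothetical caller sketch (illustrative only; real callers normally reach
+ * these static wrappers through the interface ops table, and the variable
+ * names below are assumptions): a typical bundle setup adds a channel, adds
+ * and configures its streams, then starts the channel:
+ *
+ *   uint32_t ch  = mm_camera_intf_add_channel(cam_hdl, &attr, bundle_cb, me);
+ *   uint32_t sid = mm_camera_intf_add_stream(cam_hdl, ch);
+ *   mm_camera_intf_config_stream(cam_hdl, ch, sid, &stream_cfg);
+ *   mm_camera_intf_start_channel(cam_hdl, ch);
+ */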
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : all streams in the channel should be stopped already before
+ * this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ch_id = %d", ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_del_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : this call only queries the channel's bundle configuration;
+ *              the channel state is not changed.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_bundle_info(uint32_t camera_handle,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ch_id = %d", ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_bundle_info(my_obj, ch_id, bundle_info);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_register_event_notify
+ *
+ * DESCRIPTION: register for event notify
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @evt_cb : callback for event notify
+ * @user_data : user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_event_notify(uint32_t camera_handle,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ");
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("E rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_qbuf(my_obj, ch_id, buf);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X evt_type = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_queued_buf_count
+ *
+ * DESCRIPTION: returns the queued buffer count
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ *
+ * RETURN : int32_t - queued buffer count
+ *
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_queued_buf_count(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_queued_buf_count(my_obj, ch_id, stream_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X queued buffer count = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_link_stream
+ *
+ * DESCRIPTION: link a stream into a new channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ * @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN : int32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+static int32_t mm_camera_intf_link_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id)
+{
+ uint32_t id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %u ch_id = %u",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ id = mm_camera_link_stream(my_obj, ch_id, stream_id, linked_ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X stream_id = %u", stream_id);
+ return (int32_t)id;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ uint32_t stream_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d ch_id = %d",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ stream_id = mm_camera_add_stream(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X stream_id = %d", stream_id);
+ return stream_id;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d ch_id = %d stream_id = %d",
+ camera_handle, ch_id, stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_config_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d, ch_id = %d,stream_id = %d",
+ camera_handle, ch_id, stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("mm_camera_intf_config_stream stream_id = %d",stream_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_stop_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf : request buffer info
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_request_super_buf(uint32_t camera_handle,
+ uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+ int32_t rc = -1;
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj && buf) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_request_super_buf (my_obj, ch_id, buf);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
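+
+/* Illustrative burst-request sketch: the caller fills an mm_camera_req_buf_t
+ * with the number of matched super buffers it wants and passes it in. The
+ * field name num_buf_requested is an assumption; see mm_camera_interface.h
+ * for the exact layout.
+ *
+ *   mm_camera_req_buf_t req;
+ *   memset(&req, 0, sizeof(req));
+ *   req.num_buf_requested = 3;   // assumed field name
+ *   mm_camera_intf_request_super_buf(cam_hdl, ch_id, &req);
+ */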
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_super_buf_request(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @frame_idx : frame index
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush_super_buf_queue(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t frame_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_flush_super_buf_queue(my_obj, ch_id, frame_idx);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_start_zsl_snapshot
+ *
+ * DESCRIPTION: Starts zsl snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_zsl_snapshot(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_zsl_snapshot_ch(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_stop_zsl_snapshot
+ *
+ * DESCRIPTION: Stops zsl snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_zsl_snapshot(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_stop_zsl_snapshot_ch(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_configure_notify_mode
+ *
+ * DESCRIPTION: Configures channel notification mode
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_configure_notify_mode(uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_config_channel_notify(my_obj, ch_id, notify_mode);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @buf_type : type of buffer to be mapped; can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_buf(uint32_t camera_handle,
+ uint8_t buf_type,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_buf(my_obj, buf_type, fd, size);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_intf_map_bufs(uint32_t camera_handle,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_bufs(my_obj, buf_map_list);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @buf_type : type of buffer to be unmapped; can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_buf(uint32_t camera_handle,
+ uint8_t buf_type)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_unmap_buf(my_obj, buf_type);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+ camera_handle, ch_id, s_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_stream_parms(my_obj, ch_id, s_id, parms);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be retrieved from the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are
+ *              already filled in by the upper layer caller. After this call,
+ *              the corresponding fields of the requested parameters will be
+ *              filled in by the server with detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_stream_parms(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+ camera_handle, ch_id, s_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_stream_parms(my_obj, ch_id, s_id, parms);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be mapped; can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *               plane_idx = -1; otherwise, plane_idx is the
+ *               index of the plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_buf(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+ camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+ buf_type, buf_idx, plane_idx,
+ fd, size);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_bufs(uint32_t camera_handle,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d",
+ camera_handle, ch_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_stream_bufs(my_obj, ch_id, buf_map_list);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be unmapped; can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *               plane_idx = -1; otherwise, plane_idx is the
+ *               index of the plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_stream_buf(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+ camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_unmap_stream_buf(my_obj, ch_id, stream_id,
+ buf_type, buf_idx, plane_idx);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_session_id
+ *
+ * DESCRIPTION: retrieve the session ID from the kernel for this HWI instance
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @sessionid: session id to be retrieved from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get a valid session id.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_session_id(uint32_t camera_handle,
+ uint32_t* sessionid)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_session_id(my_obj, sessionid);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_sync_related_sensors
+ *
+ * DESCRIPTION: send the related camera (sensor) sync information to the backend server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @related_cam_info: pointer to the related cam info to be sent to the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get linking established in back end
+ *==========================================================================*/
+static int32_t mm_camera_intf_sync_related_sensors(uint32_t camera_handle,
+ cam_sync_related_sensors_event_info_t* related_cam_info)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_sync_related_sensors(my_obj, related_cam_info);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : get_sensor_info
+ *
+ * DESCRIPTION: get sensor info like facing(back/front) and mount angle
+ *
+ * PARAMETERS : none
+ *
+ * RETURN : none
+ *==========================================================================*/
+void get_sensor_info()
+{
+ int rc = 0;
+ int dev_fd = -1;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ size_t num_cameras = 0;
+
+ LOGD("E");
+ while (1) {
+ char dev_name[32];
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ memset(&mdev_info, 0, sizeof(mdev_info));
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ num_cameras = 0;
+ break;
+ }
+
+ if(strncmp(mdev_info.model, MSM_CONFIGURATION_NAME, sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ unsigned int num_entities = 1;
+ while (1) {
+ struct media_entity_desc entity;
+ uint32_t temp;
+ uint32_t mount_angle;
+ uint32_t facing;
+ int32_t type = 0;
+ uint8_t is_yuv;
+
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+ entity.group_id == MSM_CAMERA_SUBDEV_SENSOR) {
+ temp = entity.flags >> 8;
+ mount_angle = (temp & 0xFF) * 90;
+ facing = ((entity.flags & CAM_SENSOR_FACING_MASK) ?
+ CAMERA_FACING_FRONT:CAMERA_FACING_BACK);
+ /* TODO: Need to revisit this logic if front AUX is available. */
+ if ((unsigned int)facing == CAMERA_FACING_FRONT) {
+ type = CAM_TYPE_STANDALONE;
+ } else if (entity.flags & CAM_SENSOR_TYPE_MASK) {
+ type = CAM_TYPE_AUX;
+ } else {
+ type = CAM_TYPE_MAIN;
+ }
+ is_yuv = ((entity.flags & CAM_SENSOR_FORMAT_MASK) ?
+ CAM_SENSOR_YUV:CAM_SENSOR_RAW);
+ LOGL("index = %u flag = %x mount_angle = %u "
+ "facing = %u type: %u is_yuv = %u\n",
+ (unsigned int)num_cameras, (unsigned int)temp,
+ (unsigned int)mount_angle, (unsigned int)facing,
+ (unsigned int)type, (uint8_t)is_yuv);
+ g_cam_ctrl.info[num_cameras].facing = (int)facing;
+ g_cam_ctrl.info[num_cameras].orientation = (int)mount_angle;
+ g_cam_ctrl.cam_type[num_cameras] = type;
+ g_cam_ctrl.is_yuv[num_cameras] = is_yuv;
+ LOGD("dev_info[id=%zu,name='%s']\n",
+ num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+ num_cameras++;
+ continue;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ }
+
+ LOGD("num_cameras=%d\n", g_cam_ctrl.num_cam);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : sort_camera_info
+ *
+ * DESCRIPTION: sort camera info so that back cameras are assigned smaller
+ *              indices than front cameras
+ *
+ * PARAMETERS :
+ *   @num_cam : number of cameras to sort
+ *
+ * RETURN :
+ *==========================================================================*/
+void sort_camera_info(int num_cam)
+{
+ int idx = 0, i;
+ struct camera_info temp_info[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_type_t temp_type[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_mode_t temp_mode[MM_CAMERA_MAX_NUM_SENSORS];
+ uint8_t temp_is_yuv[MM_CAMERA_MAX_NUM_SENSORS];
+ char temp_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+
+ memset(temp_info, 0, sizeof(temp_info));
+ memset(temp_dev_name, 0, sizeof(temp_dev_name));
+ memset(temp_type, 0, sizeof(temp_type));
+ memset(temp_mode, 0, sizeof(temp_mode));
+ memset(temp_is_yuv, 0, sizeof(temp_is_yuv));
+
+ /* TODO: Need to revisit this logic if front AUX is available. */
+
+    /* first save the back main cameras' info */
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+ (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Back Main Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+    /* then save the front main cameras' info */
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+ (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Front Main Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ /* save the aux back cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+ (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Back Aux Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ //TODO: Need to revisit this logic if front AUX is available.
+ /* save the aux front cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+ (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Front Aux Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ if (idx <= num_cam) {
+ memcpy(g_cam_ctrl.info, temp_info, sizeof(temp_info));
+ memcpy(g_cam_ctrl.cam_type, temp_type, sizeof(temp_type));
+ memcpy(g_cam_ctrl.cam_mode, temp_mode, sizeof(temp_mode));
+ memcpy(g_cam_ctrl.is_yuv, temp_is_yuv, sizeof(temp_is_yuv));
+ memcpy(g_cam_ctrl.video_dev_name, temp_dev_name, sizeof(temp_dev_name));
+        //Set num_cam to the number of cameras finally exposed after dual/aux filtering.
+ g_cam_ctrl.num_cam = idx;
+ for (i = 0; i < idx; i++) {
+ LOGI("Camera id: %d facing: %d, type: %d is_yuv: %d",
+ i, g_cam_ctrl.info[i].facing, g_cam_ctrl.cam_type[i], g_cam_ctrl.is_yuv[i]);
+ }
+ }
+ LOGI("Number of cameras %d sorted %d", num_cam, idx);
+ return;
+}
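+
+/* Illustrative ordering sketch: if discovery produced {front main, back aux,
+ * back main}, the passes above re-expose them as {back main, front main,
+ * back aux}, so a back main camera (when present) always lands at index 0. */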
+
+/*===========================================================================
+ * FUNCTION : get_num_of_cameras
+ *
+ * DESCRIPTION: get number of cameras
+ *
+ * PARAMETERS :
+ *
+ * RETURN : number of cameras supported
+ *==========================================================================*/
+uint8_t get_num_of_cameras()
+{
+ int rc = 0;
+ int dev_fd = -1;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ int8_t num_cameras = 0;
+    char subdev_name[32] = {0};
+ int32_t sd_fd = -1;
+ struct sensor_init_cfg_data cfg;
+ char prop[PROPERTY_VALUE_MAX];
+
+ LOGD("E");
+
+ property_get("vold.decrypt", prop, "0");
+ int decrypt = atoi(prop);
+ if (decrypt == 1)
+ return 0;
+
+ /* lock the mutex */
+ pthread_mutex_lock(&g_intf_lock);
+
+ while (1) {
+ uint32_t num_entities = 1U;
+ char dev_name[32];
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ break;
+ }
+
+ if (strncmp(mdev_info.model, MSM_CONFIGURATION_NAME,
+ sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ LOGD("entity id %d", entity.id);
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities");
+ rc = 0;
+ break;
+ }
+ LOGD("entity name %s type %d group id %d",
+ entity.name, entity.type, entity.group_id);
+ if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+ entity.group_id == MSM_CAMERA_SUBDEV_SENSOR_INIT) {
+                snprintf(subdev_name, sizeof(subdev_name), "/dev/%s", entity.name);
+ break;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ }
+
+ /* Open sensor_init subdev */
+ sd_fd = open(subdev_name, O_RDWR);
+    if (sd_fd < 0) {
+        LOGE("Open sensor_init subdev failed");
+        /* release the interface lock before bailing out */
+        pthread_mutex_unlock(&g_intf_lock);
+        return FALSE;
+    }
+
+ cfg.cfgtype = CFG_SINIT_PROBE_WAIT_DONE;
+ cfg.cfg.setting = NULL;
+ if (ioctl(sd_fd, VIDIOC_MSM_SENSOR_INIT_CFG, &cfg) < 0) {
+ LOGE("failed");
+ }
+ close(sd_fd);
+    sd_fd = -1;
+
+
+ num_media_devices = 0;
+ while (1) {
+ uint32_t num_entities = 1U;
+ char dev_name[32];
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices: %s\n", strerror(errno));
+ break;
+ }
+ num_media_devices++;
+ memset(&mdev_info, 0, sizeof(mdev_info));
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ num_cameras = 0;
+ break;
+ }
+
+ if(strncmp(mdev_info.model, MSM_CAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+                strlcpy(g_cam_ctrl.video_dev_name[num_cameras],
+                    entity.name, sizeof(g_cam_ctrl.video_dev_name[num_cameras]));
+ LOGI("dev_info[id=%d,name='%s']\n",
+ (int)num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+ num_cameras++;
+ break;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ if (num_cameras >= MM_CAMERA_MAX_NUM_SENSORS) {
+ LOGW("Maximum number of camera reached %d", num_cameras);
+ break;
+ }
+ }
+ g_cam_ctrl.num_cam = num_cameras;
+
+ get_sensor_info();
+ sort_camera_info(g_cam_ctrl.num_cam);
+ /* unlock the mutex */
+ pthread_mutex_unlock(&g_intf_lock);
+ LOGI("num_cameras=%d\n", (int)g_cam_ctrl.num_cam);
+ return(uint8_t)g_cam_ctrl.num_cam;
+}
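+
+/* Illustrative call sequence (kept out of the build): a client queries the
+ * sensor count once, then opens cameras by index within [0, count). The
+ * close_camera() signature is assumed to take only the camera handle, as
+ * registered in the ops v-table below; the example_* helper is hypothetical. */
+#if 0
+static void example_enumerate_and_open(void)
+{
+    uint8_t count = get_num_of_cameras();
+    mm_camera_vtbl_t *vtbl = NULL;
+
+    if ((count > 0) && (camera_open(0, &vtbl) == 0) && (vtbl != NULL)) {
+        /* ... drive the camera through vtbl->ops here ... */
+        vtbl->ops->close_camera(vtbl->camera_handle);
+    }
+}
+#endif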
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_process_advanced_capture
+ *
+ * DESCRIPTION: Configures channel advanced capture mode
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @type : advanced capture type
+ * @ch_id : channel handle
+ * @trigger : 1 for start and 0 for cancel/stop
+ *   @in_value : input capture configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_process_advanced_capture(uint32_t camera_handle,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ int8_t trigger, void *in_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_channel_advanced_capture(my_obj, ch_id, type,
+ (uint32_t)trigger, in_value);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X ");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_register_stream_buf_cb
+ *
+ * DESCRIPTION: Register special callback for stream buffer
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @buf_cb : callback function
+ *   @cb_type  : SYNC/ASYNC callback type
+ * @userdata : userdata pointer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_stream_buf_cb(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata)
+{
+ int32_t rc = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %u ch_id = %u",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_reg_stream_buf_cb(my_obj, ch_id, stream_id,
+ buf_cb, cb_type, userdata);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return (int32_t)rc;
+}
+
+struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType)
+{
+ *pCamType = g_cam_ctrl.cam_type[camera_id];
+ return &g_cam_ctrl.info[camera_id];
+}
+
+uint8_t is_yuv_sensor(uint32_t camera_id)
+{
+ return g_cam_ctrl.is_yuv[camera_id];
+}
+
+/* camera ops v-table */
+static mm_camera_ops_t mm_camera_ops = {
+ .query_capability = mm_camera_intf_query_capability,
+ .register_event_notify = mm_camera_intf_register_event_notify,
+ .close_camera = mm_camera_intf_close,
+ .set_parms = mm_camera_intf_set_parms,
+ .get_parms = mm_camera_intf_get_parms,
+ .do_auto_focus = mm_camera_intf_do_auto_focus,
+ .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
+ .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+ .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
+ .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
+ .map_buf = mm_camera_intf_map_buf,
+ .map_bufs = mm_camera_intf_map_bufs,
+ .unmap_buf = mm_camera_intf_unmap_buf,
+ .add_channel = mm_camera_intf_add_channel,
+ .delete_channel = mm_camera_intf_del_channel,
+ .get_bundle_info = mm_camera_intf_get_bundle_info,
+ .add_stream = mm_camera_intf_add_stream,
+ .link_stream = mm_camera_intf_link_stream,
+ .delete_stream = mm_camera_intf_del_stream,
+ .config_stream = mm_camera_intf_config_stream,
+ .qbuf = mm_camera_intf_qbuf,
+ .get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
+ .map_stream_buf = mm_camera_intf_map_stream_buf,
+ .map_stream_bufs = mm_camera_intf_map_stream_bufs,
+ .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
+ .set_stream_parms = mm_camera_intf_set_stream_parms,
+ .get_stream_parms = mm_camera_intf_get_stream_parms,
+ .start_channel = mm_camera_intf_start_channel,
+ .stop_channel = mm_camera_intf_stop_channel,
+ .request_super_buf = mm_camera_intf_request_super_buf,
+ .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+ .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
+ .configure_notify_mode = mm_camera_intf_configure_notify_mode,
+ .process_advanced_capture = mm_camera_intf_process_advanced_capture,
+ .get_session_id = mm_camera_intf_get_session_id,
+ .sync_related_sensors = mm_camera_intf_sync_related_sensors,
+ .flush = mm_camera_intf_flush,
+ .register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb
+};
+
+/*===========================================================================
+ * FUNCTION : camera_open
+ *
+ * DESCRIPTION: open a camera by camera index
+ *
+ * PARAMETERS :
+ *   @camera_idx : camera index; must be within the range [0, num_of_cameras)
+ * @camera_vtbl : ptr to a virtual table containing camera handle and operation table.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * non-zero error code -- failure
+ *==========================================================================*/
+int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
+{
+ int32_t rc = 0;
+ mm_camera_obj_t *cam_obj = NULL;
+
+#ifdef QCAMERA_REDEFINE_LOG
+ mm_camera_set_dbg_log_properties();
+#endif
+
+ LOGD("E camera_idx = %d\n", camera_idx);
+ if (camera_idx >= g_cam_ctrl.num_cam) {
+ LOGE("Invalid camera_idx (%d)", camera_idx);
+ return -EINVAL;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ /* opened already */
+ if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+ /* Add reference */
+ g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+ pthread_mutex_unlock(&g_intf_lock);
+ LOGD("opened alreadyn");
+ *camera_vtbl = &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
+ return rc;
+ }
+
+ cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+ if(NULL == cam_obj) {
+ pthread_mutex_unlock(&g_intf_lock);
+ LOGE("no mem");
+ return -EINVAL;
+ }
+
+ /* initialize camera obj */
+ memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+ cam_obj->ctrl_fd = -1;
+ cam_obj->ds_fd = -1;
+ cam_obj->ref_count++;
+ cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+ cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+ cam_obj->vtbl.ops = &mm_camera_ops;
+ pthread_mutex_init(&cam_obj->cam_lock, NULL);
+    /* unlock the global interface lock here; otherwise, in the dual camera
+     * use case, this open would block operations on the other opened camera obj */
+ pthread_mutex_lock(&cam_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+
+ rc = mm_camera_open(cam_obj);
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (rc != 0) {
+ LOGE("mm_camera_open err = %d", rc);
+ pthread_mutex_destroy(&cam_obj->cam_lock);
+ g_cam_ctrl.cam_obj[camera_idx] = NULL;
+ free(cam_obj);
+ cam_obj = NULL;
+ pthread_mutex_unlock(&g_intf_lock);
+ *camera_vtbl = NULL;
+ return rc;
+ } else {
+ LOGD("Open succeded\n");
+ g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+ pthread_mutex_unlock(&g_intf_lock);
+ *camera_vtbl = &cam_obj->vtbl;
+ return 0;
+ }
+}
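+
+/* Illustrative sketch (kept out of the build): camera_open() is reference
+ * counted, so a second open of the same index returns the existing vtbl and
+ * only bumps ref_count; each open must be balanced with a close. The
+ * example_* helper is hypothetical. */
+#if 0
+static void example_refcounted_open(uint8_t idx)
+{
+    mm_camera_vtbl_t *first = NULL;
+    mm_camera_vtbl_t *second = NULL;
+
+    if ((camera_open(idx, &first) == 0) && (camera_open(idx, &second) == 0)) {
+        /* first and second point at the same vtbl */
+        second->ops->close_camera(second->camera_handle);
+        first->ops->close_camera(first->camera_handle);
+    }
+}
+#endif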
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100644
index 0000000..85a5d3b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,294 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_create
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ * @cam_id : camera ID
+ * @sock_type: socket type, TCP/UDP
+ *
+ * RETURN : fd related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+ int socket_fd;
+ mm_camera_sock_addr_t sock_addr;
+ int sktype;
+ int rc;
+
+ switch (sock_type)
+ {
+ case MM_CAMERA_SOCK_TYPE_UDP:
+ sktype = SOCK_DGRAM;
+ break;
+ case MM_CAMERA_SOCK_TYPE_TCP:
+ sktype = SOCK_STREAM;
+ break;
+ default:
+ LOGE("unknown socket type =%d", sock_type);
+ return -1;
+ }
+ socket_fd = socket(AF_UNIX, sktype, 0);
+ if (socket_fd < 0) {
+ LOGE("error create socket fd =%d", socket_fd);
+ return socket_fd;
+ }
+
+ memset(&sock_addr, 0, sizeof(sock_addr));
+ sock_addr.addr_un.sun_family = AF_UNIX;
+ snprintf(sock_addr.addr_un.sun_path,
+ UNIX_PATH_MAX, QCAMERA_DUMP_FRM_LOCATION"cam_socket%d", cam_id);
+ rc = connect(socket_fd, &sock_addr.addr, sizeof(sock_addr.addr_un));
+ if (0 != rc) {
+ close(socket_fd);
+ socket_fd = -1;
+ LOGE("socket_fd=%d %s ", socket_fd, strerror(errno));
+ }
+
+ LOGD("socket_fd=%d %s", socket_fd,
+ sock_addr.addr_un.sun_path);
+ return socket_fd;
+}
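+
+/* Illustrative sketch (kept out of the build): each opened camera creates its
+ * own UNIX-domain socket to the daemon and tears it down on close. The
+ * example_* helper is hypothetical. */
+#if 0
+static void example_socket_lifecycle(int cam_id)
+{
+    int fd = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
+    if (fd >= 0) {
+        /* ... exchange buffer-mapping messages over fd ... */
+        mm_camera_socket_close(fd);
+    }
+}
+#endif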
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_close
+ *
+ * DESCRIPTION: close domain socket by its fd
+ * @fd : file descriptor for the domain socket to be closed
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+ if (fd >= 0) {
+ close(fd);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_sendmsg
+ *
+ * DESCRIPTION: send msg through domain socket
+ * @fd : socket fd
+ * @msg : pointer to msg to be sent over domain socket
+ * @buf_size: size of the msg to be sent
+ * @sendfd : file descriptor to be sent
+ *
+ * RETURN : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+
+ if (msg == NULL) {
+ LOGD("msg is NULL");
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ LOGD("iov_len=%llu",
+ (unsigned long long int)iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ /* if sendfd is valid, we need to pass it through control msg */
+ if( sendfd >= 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ LOGD("Got ctrl msg pointer");
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+ *((int *)CMSG_DATA(cmsghp)) = sendfd;
+ LOGD("cmsg data=%d", *((int *) CMSG_DATA(cmsghp)));
+ } else {
+ LOGD("ctrl msg NULL");
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_bundle_sendmsg
+ *
+ * DESCRIPTION: send msg through domain socket
+ * @fd : socket fd
+ * @msg : pointer to msg to be sent over domain socket
+ * @sendfds : file descriptors to be sent
+ * @numfds : num of file descriptors to be sent
+ *
+ * RETURN : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_bundle_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int numfds)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int) * numfds)];
+ int *fds_ptr = NULL;
+
+ if (msg == NULL) {
+ LOGD("msg is NULL");
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ LOGD("iov_len=%llu",
+ (unsigned long long int)iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ /* if numfds is valid, we need to pass it through control msg */
+ if (numfds > 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int) * numfds);
+
+ fds_ptr = (int*) CMSG_DATA(cmsghp);
+ memcpy(fds_ptr, sendfds, sizeof(int) * numfds);
+ } else {
+ LOGE("ctrl msg NULL");
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_recvmsg
+ *
+ * DESCRIPTION: receive msg from domain socket.
+ * @fd : socket fd
+ * @msg : pointer to mm_camera_sock_msg_packet_t to hold the incoming msg;
+ *        must be allocated by the caller
+ * @buf_size: the size of the buf that holds incoming msg
+ * @rcvdfd : pointer to hold recvd file descriptor if not NULL.
+ *
+ * RETURN : the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr *cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+ int rcvd_fd = -1;
+ int rcvd_len = 0;
+
+ if ( (msg == NULL) || (buf_size <= 0) ) {
+ LOGE("msg buf is NULL");
+ return -1;
+ }
+
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+
+ if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+ LOGE("recvmsg failed");
+ return rcvd_len;
+ }
+
+ LOGD("msg_ctrl %p len %zd", msgh.msg_control,
+ msgh.msg_controllen);
+
+ if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+ (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+ if (cmsghp->cmsg_level == SOL_SOCKET &&
+ cmsghp->cmsg_type == SCM_RIGHTS) {
+ LOGD("CtrlMsg is valid");
+ rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+ LOGD("Receieved fd=%d", rcvd_fd);
+ } else {
+ LOGE("Unexpected Control Msg. Line=%d");
+ }
+ }
+
+ if (rcvdfd) {
+ *rcvdfd = rcvd_fd;
+ }
+
+ return rcvd_len;
+}
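+
+/* Minimal standalone sketch (kept out of the build) of the SCM_RIGHTS
+ * transfer the helpers above wrap: the peer receives a new descriptor that
+ * refers to the same open file as fd_to_pass. A socketpair is used purely
+ * for illustration; the real transport is the per-camera domain socket, and
+ * the example_* helper is hypothetical. */
+#if 0
+static void example_fd_roundtrip(int fd_to_pass)
+{
+    int sv[2];
+    char out = 'M', in = 0;
+    int rcvd_fd = -1;
+
+    if (socketpair(AF_UNIX, SOCK_DGRAM, 0, sv) == 0) {
+        mm_camera_socket_sendmsg(sv[0], &out, sizeof(out), fd_to_pass);
+        mm_camera_socket_recvmsg(sv[1], &in, sizeof(in), &rcvd_fd);
+        if (rcvd_fd >= 0) {
+            close(rcvd_fd);
+        }
+        close(sv[0]);
+        close(sv[1]);
+    }
+}
+#endif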
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..c187fb3
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,4581 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdlib.h>
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <media/msm_media_info.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* internal function declarations */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_sync_info(mm_stream_t *my_obj);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *value);
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *value);
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+ void *in_value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info,
+ uint8_t num_planes);
+int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info);
+int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *buf);
+
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame);
+int32_t mm_stream_get_queued_buf_count(mm_stream_t * my_obj);
+
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj);
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *plns);
+uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2);
+
+
+/* state machine function declare */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
+
+
+/*===========================================================================
+ * FUNCTION : mm_stream_notify_channel
+ *
+ * DESCRIPTION: function to notify channel object on received buffer
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ *              < 0 -- failure
+ *==========================================================================*/
+int32_t mm_stream_notify_channel(struct mm_channel* ch_obj,
+ mm_camera_buf_info_t *buf_info)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ if ((NULL == ch_obj) || (NULL == buf_info)) {
+ LOGD("Invalid channel/buffer");
+ return -ENODEV;
+ }
+
+ /* send cam_sem_post to wake up channel cmd thread to enqueue
+ * to super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ node->u.buf = *buf_info;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(ch_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(ch_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -ENOMEM;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_handle_rcvd_buf
+ *
+ * DESCRIPTION: function to handle newly received stream buffer
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *   @buf_info: ptr to struct storing buffer information
+ *   @has_cb  : flag indicating whether an app data callback is registered
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+ mm_camera_buf_info_t *buf_info,
+ uint8_t has_cb)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* enqueue to super buf thread */
+ if (my_obj->is_bundled) {
+ rc = mm_stream_notify_channel(my_obj->ch_obj, buf_info);
+ if (rc < 0) {
+ LOGE("Unable to notify channel");
+ }
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if(my_obj->is_linked) {
+ /* need to add into super buf for linking, add ref count */
+ my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+
+ rc = mm_stream_notify_channel(my_obj->linked_obj, buf_info);
+ if (rc < 0) {
+ LOGE("Unable to notify channel");
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if(has_cb && my_obj->cmd_thread.is_active) {
+ mm_camera_cmdcb_t* node = NULL;
+
+ /* send cam_sem_post to wake up cmd thread to dispatch dataCB */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ node->u.buf = *buf_info;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_dispatch_sync_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users on poll thread
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *   @buf_cb  : ptr to the registered SYNC callback entry
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_dispatch_sync_data(mm_stream_t * my_obj,
+ mm_stream_data_cb_t *buf_cb, mm_camera_buf_info_t *buf_info)
+{
+ mm_camera_super_buf_t super_buf;
+
+ if (NULL == my_obj || buf_info == NULL ||
+ buf_cb == NULL) {
+ return;
+ }
+
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = 1;
+ super_buf.bufs[0] = buf_info->buf;
+ super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = my_obj->ch_obj->my_hdl;
+ if ((buf_cb != NULL) && (buf_cb->cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC)
+ && (buf_cb->cb_count != 0)) {
+ /* callback */
+ buf_cb->cb(&super_buf, buf_cb->user_data);
+
+ /* if >0, reduce count by 1 every time we called CB until reaches 0
+ * when count reach 0, reset the buf_cb to have no CB */
+ if (buf_cb->cb_count > 0) {
+ buf_cb->cb_count--;
+ if (0 == buf_cb->cb_count) {
+ buf_cb->cb = NULL;
+ buf_cb->user_data = NULL;
+ }
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_data_notify
+ *
+ * DESCRIPTION: callback to handle data notify from kernel
+ *
+ * PARAMETERS :
+ * @user_data : user data ptr (stream object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_data_notify(void* user_data)
+{
+ mm_stream_t *my_obj = (mm_stream_t*)user_data;
+ int32_t i, rc;
+ uint8_t has_cb = 0, length = 0;
+ mm_camera_buf_info_t buf_info;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ if (MM_STREAM_STATE_ACTIVE != my_obj->state) {
+        /* this CB should only be received in the active (stream-on) state;
+         * if not, return here */
+ LOGE("ERROR!! Wrong state (%d) to receive data notify!",
+ my_obj->state);
+ return;
+ }
+
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ length = 1;
+ } else {
+ length = my_obj->frame_offset.num_planes;
+ }
+
+ memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+ rc = mm_stream_read_msm_frame(my_obj, &buf_info,
+ (uint8_t)length);
+ if (rc != 0) {
+ return;
+ }
+ uint32_t idx = buf_info.buf->buf_idx;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb) {
+ if (my_obj->buf_cb[i].cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+ /*For every SYNC callback, send data*/
+ mm_stream_dispatch_sync_data(my_obj,
+ &my_obj->buf_cb[i], &buf_info);
+ } else {
+ /* for every ASYNC CB, need ref count */
+ has_cb = 1;
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ /* update buffer location */
+ my_obj->buf_status[idx].in_kernel = 0;
+
+ /* update buf ref count */
+ if (my_obj->is_bundled) {
+ /* need to add into super buf since bundled, add ref count */
+ my_obj->buf_status[idx].buf_refcnt++;
+ }
+ my_obj->buf_status[idx].buf_refcnt =
+ (uint8_t)(my_obj->buf_status[idx].buf_refcnt + has_cb);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
+}
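+
+/* Buffer accounting sketch (informal): for a bundled stream with one ASYNC
+ * callback registered, the notify above ends up holding two references on the
+ * buffer -- one for the channel super-buf and one for the app data callback
+ * path -- and mm_stream_buf_done() re-queues the buffer to the kernel only
+ * once the reference count drops back to zero. */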
+
+/*===========================================================================
+ * FUNCTION : mm_stream_dispatch_app_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr storing stream buffer information
+ * @userdata: user data ptr (stream object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ int i;
+ mm_stream_t * my_obj = (mm_stream_t *)user_data;
+ mm_camera_buf_info_t* buf_info = NULL;
+ mm_camera_super_buf_t super_buf;
+
+ if (NULL == my_obj) {
+ return;
+ }
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+ LOGE("Wrong cmd_type (%d) for dataCB",
+ cmd_cb->cmd_type);
+ return;
+ }
+
+ buf_info = &cmd_cb->u.buf;
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = 1;
+ super_buf.bufs[0] = buf_info->buf;
+ super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb
+ && (my_obj->buf_cb[i].cb_type !=
+ MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
+ if (my_obj->buf_cb[i].cb_count != 0) {
+ /* if <0, means infinite CB
+ * if >0, means CB for certain times
+ * both case we need to call CB */
+
+ /* increase buf ref cnt */
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ /* callback */
+ my_obj->buf_cb[i].cb(&super_buf,
+ my_obj->buf_cb[i].user_data);
+ }
+
+ /* if >0, reduce count by 1 every time we called CB until reaches 0
+ * when count reach 0, reset the buf_cb to have no CB */
+ if (my_obj->buf_cb[i].cb_count > 0) {
+ my_obj->buf_cb[i].cb_count--;
+ if (0 == my_obj->buf_cb[i].cb_count) {
+ my_obj->buf_cb[i].cb = NULL;
+ my_obj->buf_cb[i].user_data = NULL;
+ }
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ /* do buf_done since we increased refcnt by one when has_cb */
+ mm_stream_buf_done(my_obj, buf_info->buf);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_fn
+ *
+ * DESCRIPTION: stream finite state machine entry function. Depends on stream
+ * state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch (my_obj->state) {
+ case MM_STREAM_STATE_NOTUSED:
+ LOGD("Not handling evt in unused state");
+ break;
+ case MM_STREAM_STATE_INITED:
+ rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACQUIRED:
+ rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_CFG:
+ rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_BUFFED:
+ rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_REG:
+ rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACTIVE:
+ rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ LOGD("Not a valid state (%d)", my_obj->state);
+ break;
+ }
+ LOGD("X rc =%d",rc);
+ return rc;
+}
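+
+/* Illustrative sketch (kept out of the build): callers never invoke a state
+ * handler directly; they hand an event to mm_stream_fsm_fn() and the current
+ * state picks the handler. Here a stream already in the CFG state is asked to
+ * allocate its buffers (handled by mm_stream_fsm_cfg() below); the example_*
+ * helper is hypothetical. */
+#if 0
+static int32_t example_alloc_stream_bufs(mm_stream_t *stream)
+{
+    /* meaningful only when stream->state == MM_STREAM_STATE_CFG */
+    return mm_stream_fsm_fn(stream, MM_STREAM_EVT_GET_BUF, NULL, NULL);
+}
+#endif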
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_inited
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in INITED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ char dev_name[MM_CAMERA_DEV_NAME_LEN];
+ const char *dev_name_value = NULL;
+ if (NULL == my_obj) {
+ LOGE("NULL camera object\n");
+ return -1;
+ }
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_ACQUIRE:
+ if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+ LOGE("NULL channel or camera obj\n");
+ rc = -1;
+ break;
+ }
+
+ dev_name_value = mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl);
+ if (NULL == dev_name_value) {
+ LOGE("NULL device name\n");
+ rc = -1;
+ break;
+ }
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+ dev_name_value);
+
+ my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (my_obj->fd < 0) {
+ LOGE("open dev returned %d\n", my_obj->fd);
+ rc = -1;
+ break;
+ }
+ LOGD("open dev fd = %d\n", my_obj->fd);
+ rc = mm_stream_set_ext_mode(my_obj);
+ if (0 == rc) {
+ my_obj->state = MM_STREAM_STATE_ACQUIRED;
+ } else {
+ /* failed setting ext_mode
+ * close fd */
+ close(my_obj->fd);
+ my_obj->fd = -1;
+ break;
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_acquired
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACQUIRED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+            /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ /* change state to not used */
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_cfg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in CONFIGURED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+            /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_BUF:
+ rc = mm_stream_init_bufs(my_obj);
+ /* change state to buff allocated */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_buffed
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in BUFFED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_PUT_BUF:
+ rc = mm_stream_deinit_bufs(my_obj);
+        /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+ break;
+ case MM_STREAM_EVT_REG_BUF:
+ rc = mm_stream_reg_buf(my_obj);
+ /* change state to regged */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_reg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in REGGED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ switch(evt) {
+ case MM_STREAM_EVT_UNREG_BUF:
+ rc = mm_stream_unreg_buf(my_obj);
+
+ /* change state to buffed */
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ break;
+ case MM_STREAM_EVT_START:
+ {
+ uint8_t has_cb = 0;
+ uint8_t i;
+ /* launch cmd thread if CB is not null */
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if((NULL != my_obj->buf_cb[i].cb) &&
+ (my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
+ has_cb = 1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_stream_dispatch_app_data,
+ (void *)my_obj);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+
+ my_obj->state = MM_STREAM_STATE_ACTIVE;
+ rc = mm_stream_streamon(my_obj);
+ if (0 != rc) {
+ /* failed stream on, need to release cmd thread if it's launched */
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+ my_obj->state = MM_STREAM_STATE_REG;
+ break;
+ }
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_active
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACTIVE
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_QBUF:
+ rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+ break;
+ case MM_STREAM_EVT_GET_QUEUED_BUF_COUNT:
+ rc = mm_stream_get_queued_buf_count(my_obj);
+ break;
+ case MM_STREAM_EVT_STOP:
+ {
+ uint8_t has_cb = 0;
+ uint8_t i;
+ rc = mm_stream_streamoff(my_obj);
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb
+ && my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+ has_cb = 1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_DO_ACTION:
+ rc = mm_stream_do_action(my_obj, in_val);
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping stream buffer via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and mapping
+ * buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_map_buf_ops(uint32_t frame_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size,
+ cam_mapping_buf_type type,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_map_buf(my_obj,
+ type,
+ frame_idx, plane_idx, fd, size);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_bundled_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping bundled stream buffers via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and mapping
+ * buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ * @buf_map_list : list of buffer mapping information
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_bundled_map_buf_ops(
+ const cam_buf_map_type_list *buf_map_list,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_map_bufs(my_obj,
+ buf_map_list);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_unmap_buf_ops
+ *
+ * DESCRIPTION: ops for unmapping stream buffer via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and unmapping
+ * buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_unmap_buf_ops(uint32_t frame_idx,
+ int32_t plane_idx,
+ cam_mapping_buf_type type,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_unmap_buf(my_obj,
+ type,
+ frame_idx,
+ plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_config
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = 0;
+ int32_t cb_index = 0;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ my_obj->stream_info = config->stream_info;
+ my_obj->buf_num = (uint8_t) config->stream_info->num_bufs;
+ my_obj->mem_vtbl = config->mem_vtbl;
+ my_obj->padding_info = config->padding_info;
+
+ if (config->stream_cb_sync != NULL) {
+ /* SYNC callback is always placed at index 0 */
+ my_obj->buf_cb[cb_index].cb = config->stream_cb_sync;
+ my_obj->buf_cb[cb_index].user_data = config->userdata;
+ my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+ my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_SYNC;
+ cb_index++;
+ }
+ my_obj->buf_cb[cb_index].cb = config->stream_cb;
+ my_obj->buf_cb[cb_index].user_data = config->userdata;
+ my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+ my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_ASYNC;
+
+ rc = mm_stream_sync_info(my_obj);
+ if (rc == 0) {
+ rc = mm_stream_set_fmt(my_obj);
+ if (rc < 0) {
+ LOGE("mm_stream_set_fmt failed %d",
+ rc);
+ }
+ }
+
+ my_obj->map_ops.map_ops = mm_stream_map_buf_ops;
+ my_obj->map_ops.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+ my_obj->map_ops.unmap_ops = mm_stream_unmap_buf_ops;
+ my_obj->map_ops.userdata = my_obj;
+
+ if(my_obj->mem_vtbl.set_config_ops != NULL) {
+ my_obj->mem_vtbl.set_config_ops(&my_obj->map_ops,
+ my_obj->mem_vtbl.user_data);
+ }
+ return rc;
+}
+
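+/* Illustrative sketch (not part of the original code): once mm_stream_config()
+ * above publishes my_obj->map_ops through mem_vtbl.set_config_ops(), an upper
+ * layer can keep a copy of that table and use it to map a freshly allocated
+ * buffer to the server. The helper name below is hypothetical; the table type
+ * and the map_ops signature are the same ones used by mm_stream_deinit_bufs()
+ * later in this file. */
+static int32_t example_map_stream_buf(mm_camera_map_unmap_ops_tbl_t *ops,
+ uint32_t buf_idx, int fd, size_t size)
+{
+ /* plane_idx = -1 means all planes of this buffer share the same fd */
+ return ops->map_ops(buf_idx, -1, fd, size,
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops->userdata);
+}
+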
+/*===========================================================================
+ * FUNCTION : mm_stream_release
+ *
+ * DESCRIPTION: release a stream resource
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_release(mm_stream_t *my_obj)
+{
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ memset(my_obj->buf_status, 0, sizeof(my_obj->buf_status));
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ /* close fd */
+ if(my_obj->fd >= 0)
+ {
+ close(my_obj->fd);
+ }
+
+ /* destroy mutex */
+ pthread_cond_destroy(&my_obj->buf_cond);
+ pthread_mutex_destroy(&my_obj->buf_lock);
+ pthread_mutex_destroy(&my_obj->cb_lock);
+ pthread_mutex_destroy(&my_obj->cmd_lock);
+
+ /* reset stream obj */
+ memset(my_obj, 0, sizeof(mm_stream_t));
+ my_obj->fd = -1;
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_streamon
+ *
+ * DESCRIPTION: stream on a stream by sending a v4l2 STREAMON request to the kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ int8_t i;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].map_status == 0) &&
+ (my_obj->buf_status[i].in_kernel)) {
+ LOGD("waiting for mapping to done: strm fd = %d",
+ my_obj->fd);
+ struct timespec ts;
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += WAIT_TIMEOUT;
+ rc = pthread_cond_timedwait(&my_obj->buf_cond, &my_obj->buf_lock, &ts);
+ if (rc == ETIMEDOUT) {
+ LOGE("Timed out. Abort stream-on \n");
+ rc = -1;
+ }
+ break;
+ } else if (my_obj->buf_status[i].map_status < 0) {
+ LOGD("Buffer mapping failed. Abort Stream On");
+ rc = -1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ if (rc < 0) {
+ /* remove fd from data poll thread in case of failure */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_sync_call);
+ return rc;
+ }
+
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+ if (rc < 0) {
+ LOGE("ioctl VIDIOC_STREAMON failed: rc=%d, errno %d",
+ rc, errno);
+ /* remove fd from data poll thread in case of failure */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl, mm_camera_sync_call);
+ }
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
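+/* Minimal sketch of the bounded wait used in mm_stream_streamon() above:
+ * pthread_cond_timedwait() expects an absolute CLOCK_REALTIME deadline, so the
+ * deadline is built from clock_gettime() plus a timeout in seconds
+ * (WAIT_TIMEOUT in the code above). The helper and its name are hypothetical;
+ * the caller must already hold the mutex, exactly as buf_lock is held above. */
+static int example_timed_wait(pthread_cond_t *cond, pthread_mutex_t *lock,
+ long timeout_sec)
+{
+ struct timespec ts;
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += timeout_sec;
+ /* returns 0 when signalled, ETIMEDOUT when the deadline expires first */
+ return pthread_cond_timedwait(cond, lock, &ts);
+}
+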
+/*===========================================================================
+ * FUNCTION : mm_stream_streamoff
+ *
+ * DESCRIPTION: stream off a stream by sending a v4l2 STREAMOFF request to the kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* step1: remove fd from data poll thread */
+ rc = mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_sync_call);
+ if (rc < 0) {
+ /* The error might be due to async update. In this case
+ * wait for all updates to complete before proceeding. */
+ rc = mm_camera_poll_thread_commit_updates(&my_obj->ch_obj->poll_thread[0]);
+ if (rc < 0) {
+ LOGE("Poll sync failed %d",
+ rc);
+ }
+ }
+
+ /* step2: stream off */
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+ if (rc < 0) {
+ LOGE("STREAMOFF failed: %s\n",
+ strerror(errno));
+ }
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_write_user_buf
+ *
+ * DESCRIPTION: return a buffer to its user (batch) buffer container and
+ * enqueue the container to the kernel once all frames in the batch are filled
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = 0, i;
+ int32_t index = -1, count = 0;
+ struct msm_camera_user_buf_cont_t *cont_buf = NULL;
+
+ if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[buf->buf_idx].buf_refcnt--;
+ if (0 == my_obj->buf_status[buf->buf_idx].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[buf->buf_idx].buffer;
+ cont_buf->buf_cnt = my_obj->buf[buf->buf_idx].user_buf.bufs_used;
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ cont_buf->buf_idx[i] = my_obj->buf[buf->buf_idx].user_buf.buf_idx[i];
+ }
+ rc = mm_stream_qbuf(my_obj, buf);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ buf->buf_idx, rc);
+ } else {
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ my_obj->buf[buf->buf_idx].user_buf.buf_idx[i] = -1;
+ }
+ my_obj->buf_status[buf->buf_idx].in_kernel = 1;
+ my_obj->buf[buf->buf_idx].user_buf.buf_in_use = 1;
+ }
+ } else {
+ LOGD("<DEBUG> : ref count pending count :%d idx = %d",
+ my_obj->buf_status[buf->buf_idx].buf_refcnt, buf->buf_idx);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return rc;
+ }
+
+ if ((my_obj->cur_buf_idx < 0)
+ || (my_obj->cur_buf_idx >= my_obj->buf_num)) {
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].in_kernel)
+ || (my_obj->buf[i].user_buf.buf_in_use)) {
+ continue;
+ }
+
+ my_obj->cur_buf_idx = index = i;
+ break;
+ }
+ } else {
+ index = my_obj->cur_buf_idx;
+ }
+
+ if (index == -1) {
+ LOGE("No Free batch buffer");
+ rc = -1;
+ return rc;
+ }
+
+ //Insert Buffer to Batch structure.
+ my_obj->buf[index].user_buf.buf_idx[count] = buf->buf_idx;
+ my_obj->cur_bufs_staged++;
+
+ LOGD("index = %d filled = %d used = %d",
+ index,
+ my_obj->cur_bufs_staged,
+ my_obj->buf[index].user_buf.bufs_used);
+
+ if (my_obj->cur_bufs_staged
+ == my_obj->buf[index].user_buf.bufs_used){
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[index].buf_refcnt--;
+ if (0 == my_obj->buf_status[index].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[index].buffer;
+ cont_buf->buf_cnt = my_obj->buf[index].user_buf.bufs_used;
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ cont_buf->buf_idx[i] = my_obj->buf[index].user_buf.buf_idx[i];
+ }
+ rc = mm_stream_qbuf(my_obj, &my_obj->buf[index]);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ index, rc);
+ } else {
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ my_obj->buf[index].user_buf.buf_idx[i] = -1;
+ }
+ my_obj->buf_status[index].in_kernel = 1;
+ my_obj->buf[index].user_buf.buf_in_use = 1;
+ my_obj->cur_bufs_staged = 0;
+ my_obj->cur_buf_idx = -1;
+ }
+ }else{
+ LOGD("<DEBUG> : ref count pending count :%d idx = %d",
+ my_obj->buf_status[index].buf_refcnt, index);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_read_user_buf
+ *
+ * DESCRIPTION: dequeue a stream buffer from user buffer queue and fill internal structure
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_info : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info)
+{
+ int32_t rc = 0, i;
+ mm_camera_buf_def_t *stream_buf = NULL;
+ struct msm_camera_user_buf_cont_t *user_buf = NULL;
+ nsecs_t interval_nsec = 0, frame_ts = 0, timeStamp = 0;
+ int ts_delta = 0;
+ uint32_t frameID = 0;
+
+ user_buf = (struct msm_camera_user_buf_cont_t *)buf_info->buf->buffer;
+
+ if(user_buf != my_obj->buf[buf_info->buf->buf_idx].buffer) {
+ LOGD("Buffer modified. ERROR");
+ rc = -1;
+ return rc;
+ }
+
+ if (buf_info->buf->frame_idx == 1) {
+ frameID = buf_info->buf->frame_idx;
+ }else {
+ frameID = (buf_info->buf->frame_idx - 1) * user_buf->buf_cnt;
+ }
+
+ timeStamp = (nsecs_t)(buf_info->buf->ts.tv_sec) *
+ 1000000000LL + buf_info->buf->ts.tv_nsec;
+
+ if (timeStamp <= my_obj->prev_timestamp) {
+ LOGE("TimeStamp received less than expected");
+ mm_stream_qbuf(my_obj, buf_info->buf);
+ return rc;
+ } else if (my_obj->prev_timestamp == 0
+ || (my_obj->prev_frameID != buf_info->buf->frame_idx + 1)) {
+ /* For the first frame or in case a batch is dropped */
+ interval_nsec = ((my_obj->stream_info->user_buf_info.frameInterval) * 1000000);
+ my_obj->prev_timestamp = (timeStamp - (nsecs_t)(user_buf->buf_cnt * interval_nsec));
+ } else {
+ ts_delta = timeStamp - my_obj->prev_timestamp;
+ interval_nsec = (nsecs_t)(ts_delta / user_buf->buf_cnt);
+ LOGD("Timestamp delta = %d timestamp = %lld", ts_delta, timeStamp);
+ }
+
+ for (i = 0; i < (int32_t)user_buf->buf_cnt; i++) {
+ buf_info->buf->user_buf.buf_idx[i] = user_buf->buf_idx[i];
+ stream_buf = &my_obj->plane_buf[user_buf->buf_idx[i]];
+ stream_buf->frame_idx = frameID + i;
+
+ frame_ts = (i * interval_nsec) + my_obj->prev_timestamp;
+
+ stream_buf->ts.tv_sec = (frame_ts / 1000000000LL);
+ stream_buf->ts.tv_nsec = (frame_ts - (stream_buf->ts.tv_sec * 1000000000LL));
+ stream_buf->is_uv_subsampled = buf_info->buf->is_uv_subsampled;
+
+ LOGD("buf_index %d, frame_idx %d, stream type %d, timestamp = %lld",
+ stream_buf->buf_idx, stream_buf->frame_idx,
+ my_obj->stream_info->stream_type, frame_ts);
+ }
+
+ buf_info->buf->ts.tv_sec = (my_obj->prev_timestamp / 1000000000LL);
+ buf_info->buf->ts.tv_nsec = (my_obj->prev_timestamp -
+ (buf_info->buf->ts.tv_sec * 1000000000LL));
+
+ buf_info->buf->user_buf.bufs_used = user_buf->buf_cnt;
+ buf_info->buf->user_buf.buf_in_use = 1;
+
+ my_obj->prev_timestamp = timeStamp;
+ my_obj->prev_frameID = buf_info->buf->frame_idx;
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
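+/* Worked sketch (illustrative only) of the timestamp interpolation performed
+ * in mm_stream_read_user_buf() above: a batch container carries one kernel
+ * timestamp ts for buf_cnt frames, so sub-frame i is stamped
+ * prev_ts + i * (ts - prev_ts) / buf_cnt, where prev_ts is the timestamp of
+ * the previous batch. The helper below is hypothetical; nanoseconds are
+ * represented as int64_t, standing in for nsecs_t. */
+static inline int64_t example_batch_frame_ts(int64_t prev_ts, int64_t ts,
+ uint32_t buf_cnt, uint32_t i)
+{
+ int64_t interval = (ts - prev_ts) / (int64_t)buf_cnt;
+ return prev_ts + ((int64_t)i * interval);
+}
+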
+/*===========================================================================
+ * FUNCTION : mm_stream_read_msm_frame
+ *
+ * DESCRIPTION: dequeue a stream buffer from kernel queue
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_info : ptr to a struct storing buffer information
+ * @num_planes : number of planes in the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info,
+ uint8_t num_planes)
+{
+ int32_t rc = 0;
+ struct v4l2_buffer vb;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ memset(&vb, 0, sizeof(vb));
+ vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ vb.memory = V4L2_MEMORY_USERPTR;
+ vb.m.planes = &planes[0];
+ vb.length = num_planes;
+
+ rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+ if (0 > rc) {
+ LOGE("VIDIOC_DQBUF ioctl call failed on stream type %d (rc=%d): %s",
+ my_obj->stream_info->stream_type, rc, strerror(errno));
+ } else {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->queued_buffer_count--;
+ if (0 == my_obj->queued_buffer_count) {
+ LOGH("Stoping poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_async_call);
+ LOGH("Stopped poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ uint32_t idx = vb.index;
+ buf_info->buf = &my_obj->buf[idx];
+ buf_info->frame_idx = vb.sequence;
+ buf_info->stream_id = my_obj->my_hdl;
+
+ buf_info->buf->stream_id = my_obj->my_hdl;
+ buf_info->buf->buf_idx = idx;
+ buf_info->buf->frame_idx = vb.sequence;
+ buf_info->buf->ts.tv_sec = vb.timestamp.tv_sec;
+ buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+ buf_info->buf->flags = vb.flags;
+
+ LOGH("VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d, rc %d,"
+ "queued: %d, buf_type = %d flags = %d",
+ vb.index, buf_info->buf->frame_idx,
+ my_obj->stream_info->stream_type, rc,
+ my_obj->queued_buffer_count, buf_info->buf->buf_type,
+ buf_info->buf->flags);
+
+ buf_info->buf->is_uv_subsampled =
+ (vb.reserved == V4L2_PIX_FMT_NV14 || vb.reserved == V4L2_PIX_FMT_NV41);
+
+ if(buf_info->buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ mm_stream_read_user_buf(my_obj, buf_info);
+ }
+
+ if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
+ rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
+ my_obj->mem_vtbl.user_data);
+ if (0 > rc) {
+ LOGE("Clean invalidate cache failed on buffer index: %d",
+ idx);
+ }
+ } else {
+ LOGE("Clean invalidate cache op not supported");
+ }
+ }
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_set_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @in_value : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ if (rc < 0) {
+ LOGE("Failed to set stream parameter type = %d", in_value->type);
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_get_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @in_value : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be retrieved
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_do_action
+ *
+ * DESCRIPTION: request server to perform stream-based actions
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @in_value : ptr to a struct of actions to be performed by the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Corresponding fields of actions to be performed
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+ void *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_set_ext_mode
+ *
+ * DESCRIPTION: set stream extended mode to server via v4l2 ioctl
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Server will return a server stream id that uniquely identifies
+ * this stream on the server side. Later, per-stream communication
+ * with the server should use this server stream id.
+ *==========================================================================*/
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_streamparm s_parm;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ memset(&s_parm, 0, sizeof(s_parm));
+ s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+ LOGD("stream fd=%d, rc=%d, extended_mode=%d\n",
+ my_obj->fd, rc, s_parm.parm.capture.extendedmode);
+ if (rc == 0) {
+ /* get server stream id */
+ my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
+ } else {
+ LOGE("VIDIOC_S_PARM failed %d, errno %d", rc, errno);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel queue for future use
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+ int32_t rc = 0;
+ uint32_t length = 0;
+ struct v4l2_buffer buffer;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d, stream type = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state,
+ my_obj->stream_info->stream_type);
+
+ if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ LOGD("USERPTR num_buf = %d, idx = %d",
+ buf->user_buf.bufs_used, buf->buf_idx);
+ memset(&planes, 0, sizeof(planes));
+ planes[0].length = my_obj->stream_info->user_buf_info.size;
+ planes[0].m.userptr = buf->fd;
+ length = 1;
+ } else {
+ memcpy(planes, buf->planes_buf.planes, sizeof(planes));
+ length = buf->planes_buf.num_planes;
+ }
+
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ buffer.memory = V4L2_MEMORY_USERPTR;
+ buffer.index = (__u32)buf->buf_idx;
+ buffer.m.planes = &planes[0];
+ buffer.length = (__u32)length;
+
+ if ( NULL != my_obj->mem_vtbl.invalidate_buf ) {
+ rc = my_obj->mem_vtbl.invalidate_buf(buffer.index,
+ my_obj->mem_vtbl.user_data);
+ if ( 0 > rc ) {
+ LOGE("Cache invalidate failed on buffer index: %d",
+ buffer.index);
+ return rc;
+ }
+ } else {
+ LOGE("Cache invalidate op not added");
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->queued_buffer_count++;
+ if (1 == my_obj->queued_buffer_count) {
+ /* Add fd to data poll thread */
+ LOGH("Starting poll on stream %p type: %d",
+ my_obj,my_obj->stream_info->stream_type);
+ rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, my_obj->fd, mm_stream_data_notify, (void*)my_obj,
+ mm_camera_async_call);
+ if (0 > rc) {
+ LOGE("Add poll on stream %p type: %d fd error (rc=%d)",
+ my_obj, my_obj->stream_info->stream_type, rc);
+ } else {
+ LOGH("Started poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (0 > rc) {
+ LOGE("VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
+ my_obj->stream_info->stream_type, rc, strerror(errno));
+ my_obj->queued_buffer_count--;
+ if (0 == my_obj->queued_buffer_count) {
+ /* Remove fd from data poll in case of failing
+ * first buffer queuing attempt */
+ LOGH("Stoping poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_async_call);
+ LOGH("Stopped poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ } else {
+ LOGH("VIDIOC_QBUF buf_index %d, frame_idx %d stream type %d, rc %d,"
+ " queued: %d, buf_type = %d",
+ buffer.index, buf->frame_idx, my_obj->stream_info->stream_type, rc,
+ my_obj->queued_buffer_count, buf->buf_type);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_request_buf
+ *
+ * DESCRIPTION: This function lets the kernel know, via v4l2 ioctl, the number
+ * of buffers that need to be registered.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_requestbuffers bufreq;
+ uint8_t buf_num = my_obj->buf_num;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ LOGD("buf_num = %d, stream type = %d",
+ buf_num, my_obj->stream_info->stream_type);
+
+ if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+ LOGE("buf num %d > max limit %d\n",
+ buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+ return -1;
+ }
+
+ memset(&bufreq, 0, sizeof(bufreq));
+ bufreq.count = buf_num;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ LOGE("fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d, errno %d",
+ my_obj->fd, rc, errno);
+ }
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
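+/* Illustrative sketch of the v4l2 call order this layer drives for a USERPTR
+ * multi-planar capture stream: VIDIOC_REQBUFS (mm_stream_request_buf), one
+ * VIDIOC_QBUF per buffer (mm_stream_qbuf, driven by mm_stream_reg_buf), then
+ * VIDIOC_STREAMON (mm_stream_streamon). Error handling is omitted and
+ * fd/bufs/num are hypothetical placeholders. */
+static void example_v4l2_start(int fd, struct v4l2_buffer *bufs, uint32_t num)
+{
+ uint32_t i;
+ struct v4l2_requestbuffers req;
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ memset(&req, 0, sizeof(req));
+ req.count = num;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ req.memory = V4L2_MEMORY_USERPTR;
+ ioctl(fd, VIDIOC_REQBUFS, &req);
+
+ for (i = 0; i < num; i++) {
+ ioctl(fd, VIDIOC_QBUF, &bufs[i]);
+ }
+
+ ioctl(fd, VIDIOC_STREAMON, &type);
+}
+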
+/*===========================================================================
+ * FUNCTION : mm_stream_need_wait_for_mapping
+ *
+ * DESCRIPTION: Utility function to determine whether to wait for mapping
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int8_t whether wait is necessary
+ * 0 -- no wait
+ * 1 -- wait
+ *==========================================================================*/
+int8_t mm_stream_need_wait_for_mapping(mm_stream_t * my_obj)
+{
+ uint32_t i;
+ int8_t ret = 0;
+
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].map_status == 0)
+ && (my_obj->buf_status[i].in_kernel)) {
+ /* do not signal if any buffer is queued to the kernel
+ but not yet mapped */
+ ret = 1;
+ } else if (my_obj->buf_status[i].map_status < 0) {
+ return 0;
+ }
+ }
+
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_map_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ * plane_idx = -1; otherwise, plane_idx is the
+ * index of the plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_map_buf(mm_stream_t * my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx,
+ int32_t fd,
+ size_t size)
+{
+ int32_t rc = 0;
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+ packet.payload.buf_map.type = buf_type;
+ packet.payload.buf_map.fd = fd;
+ packet.payload.buf_map.size = size;
+ packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+ packet.payload.buf_map.frame_idx = frame_idx;
+ packet.payload.buf_map.plane_idx = plane_idx;
+ LOGD("mapping buf_type %d, stream_id %d, frame_idx %d, fd %d, size %d",
+ buf_type, my_obj->server_stream_id, frame_idx, fd, size);
+ rc = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet, sizeof(cam_sock_packet_t), fd);
+
+ if ((buf_type == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+ || ((buf_type
+ == CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+ && (my_obj->stream_info != NULL)
+ && (my_obj->stream_info->streaming_mode
+ == CAM_STREAMING_MODE_BATCH))) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (rc < 0) {
+ my_obj->buf_status[frame_idx].map_status = -1;
+ } else {
+ my_obj->buf_status[frame_idx].map_status = 1;
+ }
+ if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+ LOGD("Buffer mapping Done: Signal strm fd = %d",
+ my_obj->fd);
+ pthread_cond_signal(&my_obj->buf_cond);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return rc;
+}
+
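+/* Background sketch (an assumption about the transport, not code from this
+ * file): "mapping via domain socket" hands the buffer fd to the server process
+ * as SCM_RIGHTS ancillary data sent alongside the packet, roughly as below.
+ * The real send path is mm_camera_util_sendmsg(), presumably backed by
+ * mm_camera_sock.c; this helper is hypothetical and assumes <sys/socket.h>
+ * and <string.h> are available. */
+static int example_send_fd(int sock, const void *msg, size_t len, int fd)
+{
+ struct msghdr mh;
+ struct iovec iov;
+ char cbuf[CMSG_SPACE(sizeof(int))];
+ struct cmsghdr *cm;
+
+ memset(&mh, 0, sizeof(mh));
+ memset(cbuf, 0, sizeof(cbuf));
+ iov.iov_base = (void *)msg;
+ iov.iov_len = len;
+ mh.msg_iov = &iov;
+ mh.msg_iovlen = 1;
+ mh.msg_control = cbuf;
+ mh.msg_controllen = sizeof(cbuf);
+
+ cm = CMSG_FIRSTHDR(&mh);
+ cm->cmsg_level = SOL_SOCKET;
+ cm->cmsg_type = SCM_RIGHTS;
+ cm->cmsg_len = CMSG_LEN(sizeof(int));
+ memcpy(CMSG_DATA(cm), &fd, sizeof(int));
+
+ return (int)sendmsg(sock, &mh, 0);
+}
+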
+/*===========================================================================
+ * FUNCTION : mm_stream_map_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_map_list : list of buffer objects to map
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+
+int32_t mm_stream_map_bufs(mm_stream_t * my_obj,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+ memcpy(&packet.payload.buf_map_list, buf_map_list,
+ sizeof(packet.payload.buf_map_list));
+
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+ uint32_t numbufs = packet.payload.buf_map_list.length;
+ if (numbufs < 1) {
+ LOGD("No buffers, suppressing the mapping command");
+ return 0;
+ }
+
+ uint32_t i;
+ for (i = 0; i < numbufs; i++) {
+ packet.payload.buf_map_list.buf_maps[i].stream_id = my_obj->server_stream_id;
+ sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+ }
+
+ for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+ packet.payload.buf_map_list.buf_maps[i].fd = -1;
+ sendfds[i] = -1;
+ }
+
+ int32_t ret = mm_camera_util_bundled_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet, sizeof(cam_sock_packet_t), sendfds, numbufs);
+ if ((numbufs > 0) && ((buf_map_list->buf_maps[0].type
+ == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+ || ((buf_map_list->buf_maps[0].type ==
+ CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+ && (my_obj->stream_info != NULL)
+ && (my_obj->stream_info->streaming_mode
+ == CAM_STREAMING_MODE_BATCH)))) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for (i = 0; i < numbufs; i++) {
+ if (ret < 0) {
+ my_obj->buf_status[i].map_status = -1;
+ } else {
+ my_obj->buf_status[i].map_status = 1;
+ }
+ }
+
+ if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+ LOGD("Buffer mapping Done: Signal strm fd = %d",
+ my_obj->fd);
+ pthread_cond_signal(&my_obj->buf_cond);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_unmap_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_type : type of buffer to be unmapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ * plane_idx = -1; otherwise, plane_idx is the
+ * index of the plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unmap_buf(mm_stream_t * my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx)
+{
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+ packet.payload.buf_unmap.type = buf_type;
+ packet.payload.buf_unmap.stream_id = my_obj->server_stream_id;
+ packet.payload.buf_unmap.frame_idx = frame_idx;
+ packet.payload.buf_unmap.plane_idx = plane_idx;
+ int32_t ret = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ -1);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[frame_idx].map_status = 0;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_init_bufs
+ *
+ * DESCRIPTION: initialize stream buffers needed. This function will request
+ * buffers needed from upper layer through the mem ops table passed
+ * during configuration stage.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+ int32_t i, rc = 0;
+ uint8_t *reg_flags = NULL;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* deinit buf if it's not NULL*/
+ if (NULL != my_obj->buf) {
+ mm_stream_deinit_bufs(my_obj);
+ }
+
+ rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
+ &my_obj->buf_num,
+ &reg_flags,
+ &my_obj->buf,
+ &my_obj->map_ops,
+ my_obj->mem_vtbl.user_data);
+
+ if (0 != rc) {
+ LOGE("Error get buf, rc = %d\n", rc);
+ return rc;
+ }
+
+ for (i = 0; i < my_obj->buf_num; i++) {
+ my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+ my_obj->buf[i].stream_id = my_obj->my_hdl;
+ my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
+
+ if (my_obj->buf[i].buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ my_obj->buf[i].user_buf.bufs_used =
+ (int8_t)my_obj->stream_info->user_buf_info.frame_buf_cnt;
+ my_obj->buf[i].user_buf.buf_in_use = reg_flags[i];
+ }
+ }
+
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ my_obj->plane_buf = my_obj->buf[0].user_buf.plane_buf;
+ if (my_obj->plane_buf != NULL) {
+ my_obj->plane_buf_num =
+ my_obj->buf_num *
+ my_obj->stream_info->user_buf_info.frame_buf_cnt;
+ for (i = 0; i < my_obj->plane_buf_num; i++) {
+ my_obj->plane_buf[i].stream_id = my_obj->my_hdl;
+ my_obj->plane_buf[i].stream_type = my_obj->stream_info->stream_type;
+ }
+ }
+ my_obj->cur_bufs_staged = 0;
+ my_obj->cur_buf_idx = -1;
+ }
+
+ free(reg_flags);
+ reg_flags = NULL;
+
+ /* update in stream info about number of stream buffers */
+ my_obj->stream_info->num_bufs = my_obj->buf_num;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_deinit_bufs
+ *
+ * DESCRIPTION: return stream buffers to upper layer through the mem ops table
+ * passed during configuration stage.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+
+ mm_camera_map_unmap_ops_tbl_t ops_tbl;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (NULL == my_obj->buf) {
+ LOGD("Buf is NULL, no need to deinit");
+ return rc;
+ }
+
+ /* release bufs */
+ ops_tbl.map_ops = mm_stream_map_buf_ops;
+ ops_tbl.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+ ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+ ops_tbl.userdata = my_obj;
+
+ rc = my_obj->mem_vtbl.put_bufs(&ops_tbl,
+ my_obj->mem_vtbl.user_data);
+
+ if (my_obj->plane_buf != NULL) {
+ free(my_obj->plane_buf);
+ my_obj->plane_buf = NULL;
+ }
+
+ free(my_obj->buf);
+ my_obj->buf = NULL;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_reg_buf
+ *
+ * DESCRIPTION: register buffers with kernel by calling v4l2 ioctl QBUF for
+ * each buffer in the stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ uint8_t i;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ rc = mm_stream_request_buf(my_obj);
+ if (rc != 0) {
+ return rc;
+ }
+
+ my_obj->queued_buffer_count = 0;
+ for(i = 0; i < my_obj->buf_num; i++){
+ /* check if need to qbuf initially */
+ if (my_obj->buf_status[i].initial_reg_flag) {
+ rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+ if (rc != 0) {
+ LOGE("VIDIOC_QBUF rc = %d\n", rc);
+ break;
+ }
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 1;
+ } else {
+ /* the buf is held by upper layer, will not queue into kernel.
+ * add buf reference count */
+ my_obj->buf_status[i].buf_refcnt = 1;
+ my_obj->buf_status[i].in_kernel = 0;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_unreg_buf
+ *
+ * DESCRIPTION: unregister all stream buffers from kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+ struct v4l2_requestbuffers bufreq;
+ int32_t i, rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* unreg buf to kernel */
+ bufreq.count = 0;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ LOGE("fd=%d, VIDIOC_REQBUFS failed, rc=%d, errno %d",
+ my_obj->fd, rc, errno);
+ }
+
+ /* reset buf reference count */
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for(i = 0; i < my_obj->buf_num; i++){
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 0;
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_get_v4l2_fmt
+ *
+ * DESCRIPTION: translate camera image format into FOURCC code
+ *
+ * PARAMETERS :
+ * @fmt : camera image format
+ *
+ * RETURN : FOURCC code for image format
+ *==========================================================================*/
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt)
+{
+ uint32_t val = 0;
+ switch(fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+ val = V4L2_PIX_FMT_NV12;
+ break;
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ val = V4L2_PIX_FMT_NV21;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+ val= V4L2_PIX_FMT_SBGGR10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+ val = V4L2_PIX_FMT_SBGGR12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+ val = V4L2_PIX_FMT_SBGGR14;
+ break;
+ case CAM_FORMAT_YUV_422_NV61:
+ val= V4L2_PIX_FMT_NV61;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+ val= V4L2_PIX_FMT_YUYV;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+ val= V4L2_PIX_FMT_YVYU;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+ val= V4L2_PIX_FMT_UYVY;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+ val= V4L2_PIX_FMT_VYUY;
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ val= V4L2_PIX_FMT_NV12;
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ val= V4L2_PIX_FMT_NV16;
+ break;
+ case CAM_FORMAT_Y_ONLY:
+ val= V4L2_PIX_FMT_GREY;
+ break;
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ val= V4L2_PIX_FMT_Y10;
+ break;
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ val= V4L2_PIX_FMT_Y12;
+ break;
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* No v4l2 format is defined yet for CAM_FORMAT_Y_ONLY_14_BPP */
+ /* val= V4L2_PIX_FMT_Y14; */
+ val = 0;
+ LOGE("Unknown fmt=%d", fmt);
+ break;
+ case CAM_FORMAT_MAX:
+ /* CAM_STREAM_TYPE_DEFAULT,
+ * CAM_STREAM_TYPE_OFFLINE_PROC,
+ * and CAM_STREAM_TYPE_METADATA
+ * set fmt to CAM_FORMAT_MAX*/
+ val = 0;
+ break;
+ default:
+ val = 0;
+ LOGE("Unknown fmt=%d", fmt);
+ break;
+ }
+ LOGD("fmt=%d, val =%d", fmt, val);
+ return val;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_preview
+ *
+ * DESCRIPTION: calculate preview frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @stream_info : stream information (format, stream type)
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+
+ uint32_t width_padding = 0;
+ uint32_t height_padding = 0;
+
+ switch (stream_info->fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ width_padding = padding->width_padding;
+ height_padding = CAM_PAD_TO_2;
+ } else {
+ width_padding = padding->width_padding;
+ height_padding = padding->height_padding;
+ }
+
+ stride = PAD_TO_SIZE(dim->width, width_padding);
+ scanline = PAD_TO_SIZE(dim->height, height_padding);
+
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width, width_padding);
+ scanline = PAD_TO_SIZE(dim->height / 2, height_padding);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset = 0;
+ buf_planes->plane_info.mp[2].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[2].offset_x = 0;
+ buf_planes->plane_info.mp[2].offset_y = 0;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = dim->height;
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0,meta_scanline = 0;
+ // using UBWC
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+
+ default:
+ LOGE("Invalid cam_format for preview %d",
+ stream_info->fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
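+
+/* Worked example (illustrative values, assuming PAD_TO_SIZE(v, p) rounds v up
+ * to the next multiple of p, CAM_PAD_TO_2 == 2 and CAM_PAD_TO_4K == 4096):
+ * a 1920x1080 CAM_FORMAT_YUV_420_NV21 preview stream with
+ * padding->width_padding == CAM_PAD_TO_32 works out to
+ *   Y plane    : stride 1920, scanline 1080, len 1920*1080 = 2073600
+ *   CbCr plane : stride 1920, scanline 540,  len 1920*540  = 1036800
+ *   frame_len  : PAD_TO_SIZE(2073600 + 1036800, CAM_PAD_TO_4K) = 3112960
+ */
+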
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_post_view
+ *
+ * DESCRIPTION: calculate postview frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_64);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_64);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset = 0;
+ buf_planes->plane_info.mp[2].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[2].offset_x = 0;
+ buf_planes->plane_info.mp[2].offset_y = 0;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = dim->height;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0, meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGE("Invalid cam_format for preview %d",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
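+
+/* Worked example (illustrative only, assuming the usual CAM_PAD_TO_64 = 64
+ * and CAM_PAD_TO_4K = 4096 values): a 1920x1080 NV21 postview buffer from the
+ * function above works out as
+ * Y plane : stride 1920, scanline PAD_TO_SIZE(1080, 64) = 1088,
+ * len = 1920 * 1088 = 2088960 bytes
+ * CbCr plane: stride 1920, scanline PAD_TO_SIZE(540, 64) = 576,
+ * len = 1920 * 576 = 1105920 bytes
+ * frame_len : PAD_TO_SIZE(2088960 + 1105920, 4096) = 3194880 bytes
+ */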
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_snapshot
+ *
+ * DESCRIPTION: calculate snapshot/postproc frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ uint8_t isAFamily = mm_camera_util_chip_is_a_family();
+ int offset_x = 0, offset_y = 0;
+ int stride = 0, scanline = 0;
+
+ if (isAFamily) {
+ stride = dim->width;
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_16);
+ offset_x = 0;
+ offset_y = scanline - dim->height;
+ scanline += offset_y; /* double padding */
+ } else {
+ offset_x = PAD_TO_SIZE(padding->offset_info.offset_x,
+ padding->plane_padding);
+ offset_y = PAD_TO_SIZE(padding->offset_info.offset_y,
+ padding->plane_padding);
+ stride = PAD_TO_SIZE((dim->width +
+ (2 * offset_x)), padding->width_padding);
+ scanline = PAD_TO_SIZE((dim->height +
+ (2 * offset_y)), padding->height_padding);
+ }
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline/2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].offset_x = offset_x;
+ buf_planes->plane_info.mp[2].offset_y = offset_y;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+ buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0, meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGE("Invalid cam_format for snapshot %d",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
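+
+/* Note on PAD_TO_SIZE(): every plane calculation above relies on it rounding
+ * a value up to the next multiple of the pad argument. A minimal sketch of
+ * the idea, assuming a power-of-two pad value (the actual macro is defined
+ * elsewhere in the camera headers and may differ in integer casts):
+ *
+ * padded = (size + pad - 1) & ~(pad - 1);
+ *
+ * e.g. PAD_TO_SIZE(1080, CAM_PAD_TO_64) -> 1088,
+ * PAD_TO_SIZE(3194880, CAM_PAD_TO_4K) -> 3194880 (already aligned).
+ */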
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_raw
+ *
+ * DESCRIPTION: calculate raw frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+
+ if ((NULL == dim) || (NULL == padding) || (NULL == buf_planes)) {
+ return -1;
+ }
+
+ int32_t stride = PAD_TO_SIZE(dim->width, (int32_t)padding->width_padding);
+ int32_t stride_in_bytes = stride;
+ int32_t scanline = PAD_TO_SIZE(dim->height, (int32_t)padding->height_padding);
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV21:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].stride_in_bytes = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+ case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+ case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+ case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+ case CAM_FORMAT_JPEG_RAW_8BIT:
+ /* 1 plane */
+ /* Every 16 pixels occupy 32 bytes (2 bytes per pixel) */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width =
+ (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_META_RAW_8BIT:
+ // Every 16 pixels occupy 32 bytes
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ break;
+
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR:
+ /* 1 plane */
+ /* Every 16 pixels occupy 16 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY:
+ /* Every 12 pixels occupy 16 bytes */
+ stride = (dim->width + 11)/12 * 12;
+ stride_in_bytes = stride * 8 / 6;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY:
+ /* Every 10 pixels occupy 16 bytes */
+ stride = (dim->width + 9)/10 * 10;
+ stride_in_bytes = stride * 8 / 5;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY:
+ /* Every 64 pixels occupy 80 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_4);
+ stride_in_bytes = PAD_TO_SIZE(stride * 5 / 4, CAM_PAD_TO_8);
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY:
+ /* Every 32 pixels occupy 48 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ stride_in_bytes = stride * 3 / 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR:
+ /* Every 8 pixels occupy 16 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_8);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY:
+ /* Every 64 pixels occupy 112 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ stride_in_bytes = stride * 7 / 4;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY:
+ /* Every 16 pixels occupy 32 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ default:
+ LOGE("Invalid cam_format %d for raw stream",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
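+
+/* Illustrative packing math for the MIPI RAW 10BPP cases above, using a
+ * hypothetical 4208-pixel-wide raw line:
+ * stride = PAD_TO_SIZE(4208, CAM_PAD_TO_4) = 4208 pixels
+ * stride_in_bytes = PAD_TO_SIZE(4208 * 5 / 4, CAM_PAD_TO_8) = 5264 bytes
+ * i.e. 64 pixels occupy 80 bytes, matching the inline comment, and the single
+ * plane length is that byte stride multiplied by the padded scanline.
+ */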
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_video
+ *
+ * DESCRIPTION: calculate video frame offset based on format and dimension
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+ cam_dimension_t *dim, cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+ int meta_stride = 0, meta_scanline = 0;
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = dim->width;
+ scanline = dim->height;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ CAM_PAD_TO_2K);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = dim->width;
+ scanline = dim->height / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ CAM_PAD_TO_2K);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+
+#else
+ LOGD("Video format UBWC is not supported = %d",
+ fmt);
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGD("Invalid Video Format = %d", fmt);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
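+
+/* Worked example for the plain NV12 video case above (illustrative only,
+ * assuming CAM_PAD_TO_2K = 2048 and CAM_PAD_TO_4K = 4096): for 1920x1080,
+ * Y plane : PAD_TO_SIZE(1920 * 1080, 2048) = 2074624 bytes
+ * CbCr plane: PAD_TO_SIZE(1920 * 540, 2048) = 1038336 bytes
+ * frame_len : PAD_TO_SIZE(2074624 + 1038336, 4096) = 3112960 bytes
+ */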
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_metadata
+ *
+ * DESCRIPTION: calculate metadata frame offset based on image dimension and
+ * padding information
+ *
+ * PARAMETERS :
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(dim->width * dim->height),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ buf_planes->plane_info.mp[0].len;
+
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = dim->width;
+ buf_planes->plane_info.mp[0].scanline = dim->height;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_analysis
+ *
+ * DESCRIPTION: calculate analysis frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_analysis(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int32_t offset_x = 0, offset_y = 0;
+ int32_t stride, scanline;
+
+ /* Clip to the minimum supported stride and scanline */
+ if ((uint32_t)dim->width < padding->min_stride) {
+ stride = (int32_t)padding->min_stride;
+ } else {
+ stride = dim->width;
+ }
+
+ if ((uint32_t)dim->height < padding->min_scanline) {
+ scanline = (int32_t)padding->min_scanline;
+ } else {
+ scanline = dim->height;
+ }
+
+ stride = PAD_TO_SIZE(stride, padding->width_padding);
+ scanline = PAD_TO_SIZE(scanline, padding->height_padding);
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].offset_x = offset_x;
+ buf_planes->plane_info.mp[2].offset_y = offset_y;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+ buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ buf_planes->plane_info.num_planes = 1;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0, meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGE("Invalid cam_format for anlysis %d",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
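+
+/* Illustrative only: with hypothetical padding limits of min_stride = 176 and
+ * min_scanline = 144, a 160x120 analysis stream is first clipped up to
+ * 176x144 and only then run through the width/height padding and the
+ * per-format plane math above; larger dimensions pass through unchanged.
+ */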
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_postproc
+ *
+ * DESCRIPTION: calculate postprocess frame offset
+ *
+ * PARAMETERS :
+ * @stream_info: ptr to stream info
+ * @padding : padding information
+ * @plns : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *plns)
+{
+ int32_t rc = 0;
+ cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+ if (stream_info->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+ type = stream_info->reprocess_config.offline.input_type;
+ if (CAM_STREAM_TYPE_DEFAULT == type) {
+ if (plns->plane_info.frame_len == 0) {
+ // take offset from input source
+ *plns = stream_info->reprocess_config.offline.input_buf_planes;
+ return rc;
+ }
+ } else {
+ type = stream_info->reprocess_config.offline.input_type;
+ }
+ } else {
+ type = stream_info->reprocess_config.online.input_stream_type;
+ }
+
+ switch (type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ rc = mm_stream_calc_offset_preview(stream_info,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ rc = mm_stream_calc_offset_post_view(stream_info->fmt,
+ &stream_info->dim,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_CALLBACK:
+ rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ rc = mm_stream_calc_offset_video(stream_info->fmt,
+ &stream_info->dim, plns);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ rc = mm_stream_calc_offset_raw(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ rc = mm_stream_calc_offset_analysis(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ rc = mm_stream_calc_offset_metadata(&stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+ &stream_info->dim, padding, plns);
+ break;
+ default:
+ LOGE("not supported for stream type %d",
+ type);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_lcm
+ *
+ * DESCRIPTION: calculate the least common multiple (LCM) of two numbers
+ *
+ * PARAMETERS :
+ * @num1 : number 1
+ * @num2 : number 2
+ *
+ * RETURN : uint32_t type (LCM of num1 and num2; 0 if both inputs are less than 1)
+ *==========================================================================*/
+uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2)
+{
+ uint32_t lcm = 0;
+ uint32_t temp = 0;
+
+ if ((num1 < 1) && (num2 < 1)) {
+ return 0;
+ } else if (num1 < 1) {
+ return num2;
+ } else if (num2 < 1) {
+ return num1;
+ }
+
+ if (num1 > num2) {
+ lcm = num1;
+ } else {
+ lcm = num2;
+ }
+ temp = lcm;
+
+ while (1) {
+ if (((lcm%num1) == 0) && ((lcm%num2) == 0)) {
+ break;
+ }
+ lcm += temp;
+ }
+ return lcm;
+}
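+
+/* Illustrative walk-through (not part of the interface): for a hypothetical
+ * call mm_stream_calc_lcm(4, 6), the loop starts from the larger input
+ * (lcm = 6, temp = 6) and advances in steps of 6; 6 is not divisible by 4,
+ * but 12 is divisible by both inputs, so the function returns 12. */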
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+
+ cam_dimension_t dim = my_obj->stream_info->dim;
+ if ((my_obj->stream_info->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) &&
+ (my_obj->stream_info->stream_type != CAM_STREAM_TYPE_VIDEO)) {
+ if (my_obj->stream_info->pp_config.rotation == ROTATE_90 ||
+ my_obj->stream_info->pp_config.rotation == ROTATE_270) {
+ // rotated by 90 or 270, need to switch width and height
+ dim.width = my_obj->stream_info->dim.height;
+ dim.height = my_obj->stream_info->dim.width;
+ }
+ }
+
+ switch (my_obj->stream_info->stream_type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ rc = mm_stream_calc_offset_preview(my_obj->stream_info,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ rc = mm_stream_calc_offset_post_view(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_CALLBACK:
+ rc = mm_stream_calc_offset_snapshot(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ rc = mm_stream_calc_offset_postproc(my_obj->stream_info,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ rc = mm_stream_calc_offset_video(my_obj->stream_info->fmt,
+ &dim, &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ rc = mm_stream_calc_offset_raw(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ rc = mm_stream_calc_offset_analysis(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ rc = mm_stream_calc_offset_metadata(&dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ default:
+ LOGE("not supported for stream type %d",
+ my_obj->stream_info->stream_type);
+ rc = -1;
+ break;
+ }
+
+ my_obj->frame_offset = my_obj->stream_info->buf_planes.plane_info;
+ return rc;
+}
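+
+/* Illustrative note (hypothetical values): for a 1920x1080 stream with
+ * CAM_QCOM_FEATURE_ROTATION set and rotation ROTATE_90, the local dim above
+ * becomes 1080x1920 before it is passed to the per-stream-type helpers, so
+ * plane offsets are computed for the rotated frame. */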
+
+/*===========================================================================
+ * FUNCTION : mm_stream_sync_info
+ *
+ * DESCRIPTION: synchronize stream information with server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : assume the stream info buffer is mapped to the server and filled in
+ * with stream information by the upper layer. This call lets the server
+ * synchronize the stream information with the HAL. If the server finds any
+ * fields that need to be changed according to the hardware configuration,
+ * it will modify the corresponding fields so that the HAL knows about them.
+ *==========================================================================*/
+int32_t mm_stream_sync_info(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ int32_t value = 0;
+ my_obj->stream_info->stream_svr_id = my_obj->server_stream_id;
+ rc = mm_stream_calc_offset(my_obj);
+
+ if (rc == 0) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd,
+ CAM_PRIV_STREAM_INFO_SYNC,
+ &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_set_fmt
+ *
+ * DESCRIPTION: set stream format to kernel via v4l2 ioctl
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_format fmt;
+ struct msm_v4l2_format_data msm_fmt;
+ int i;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (my_obj->stream_info->dim.width == 0 ||
+ my_obj->stream_info->dim.height == 0) {
+ LOGE("invalid input[w=%d,h=%d,fmt=%d]\n",
+ my_obj->stream_info->dim.width,
+ my_obj->stream_info->dim.height,
+ my_obj->stream_info->fmt);
+ return -1;
+ }
+
+ memset(&fmt, 0, sizeof(fmt));
+ memset(&msm_fmt, 0, sizeof(msm_fmt));
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ msm_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;
+ msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;
+ msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);
+
+ if (my_obj->stream_info->streaming_mode != CAM_STREAMING_MODE_BATCH) {
+ msm_fmt.num_planes = (unsigned char)my_obj->frame_offset.num_planes;
+ for (i = 0; i < msm_fmt.num_planes; i++) {
+ msm_fmt.plane_sizes[i] = my_obj->frame_offset.mp[i].len;
+ }
+ } else {
+ msm_fmt.num_planes = 1;
+ msm_fmt.plane_sizes[0] = my_obj->stream_info->user_buf_info.size;
+ }
+
+ memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
+ rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
+ if (rc < 0) {
+ LOGE("ioctl failed %d, errno %d", rc, errno);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_buf_done
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @frame : frame to be enqueued back to kernel
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+ LOGE("Error Trying to free second time?(idx=%d) count=%d\n",
+ frame->buf_idx,
+ my_obj->buf_status[frame->buf_idx].buf_refcnt);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ rc = -1;
+ return rc;
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ rc = mm_stream_write_user_buf(my_obj, frame);
+ } else {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[frame->buf_idx].buf_refcnt--;
+ if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ LOGD("<DEBUG> : Buf done for buffer:%d, stream:%d", frame->buf_idx, frame->stream_type);
+ rc = mm_stream_qbuf(my_obj, frame);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ frame->buf_idx, rc);
+ } else {
+ my_obj->buf_status[frame->buf_idx].in_kernel = 1;
+ }
+ } else {
+ LOGD("<DEBUG> : Still ref count pending count :%d",
+ my_obj->buf_status[frame->buf_idx].buf_refcnt);
+ LOGD("<DEBUG> : for buffer:%p:%d",
+ my_obj, frame->buf_idx);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ }
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_stream_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_stream_get_queued_buf_count(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ rc = my_obj->queued_buffer_count;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_reg_buf_cb
+ *
+ * DESCRIPTION: Allow another stream to register a data callback on this stream.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @val : callback function to be registered
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+ mm_stream_data_cb_t val)
+{
+ int32_t rc = -1;
+ uint8_t i;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL == my_obj->buf_cb[i].cb) {
+ my_obj->buf_cb[i] = val;
+ rc = 0;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..0c740b4
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,698 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+typedef enum {
+ /* poll entries updated */
+ MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
+ /* poll entries updated asynchronous */
+ MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC,
+ /* commit updates */
+ MM_CAMERA_PIPE_CMD_COMMIT,
+ /* exit */
+ MM_CAMERA_PIPE_CMD_EXIT,
+ /* max count */
+ MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TASK_STATE_STOPPED,
+ MM_CAMERA_POLL_TASK_STATE_POLL, /* poll thread is in polling state */
+ MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+ uint32_t cmd;
+ mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig_async
+ *
+ * DESCRIPTION: Asynchronous call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @cmd : command to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig_async(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+
+ LOGD("E cmd = %d",cmd);
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = FALSE;
+
+ /* send cmd to worker */
+ ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if (len < 1) {
+ LOGW("len = %lld, errno = %d",
+ (long long int)len, errno);
+ /* Avoid waiting for the signal */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return 0;
+ }
+ LOGD("begin IN mutex write done, len = %lld",
+ (long long int)len);
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("X");
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig
+ *
+ * DESCRIPTION: synchronized call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @cmd : command to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+
+ LOGD("E cmd = %d",cmd);
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = FALSE;
+ /* send cmd to worker */
+
+ ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if(len < 1) {
+ LOGW("len = %lld, errno = %d",
+ (long long int)len, errno);
+ /* Avoid waiting for the signal */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return 0;
+ }
+ LOGD("begin IN mutex write done, len = %lld",
+ (long long int)len);
+ /* wait till worker task gives positive signal */
+ if (FALSE == poll_cb->status) {
+ LOGD("wait");
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+ /* done */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("X");
+ return 0;
+}
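+
+/* Illustrative note: on the success path, a synchronous signal such as
+ * mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT) returns only after
+ * the poll thread has processed the command and called
+ * mm_camera_poll_sig_done(), whereas mm_camera_poll_sig_async() returns as
+ * soon as the command has been written to the pipe. */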
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig_done
+ *
+ * DESCRIPTION: signal the caller that the pending command has been processed
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = TRUE;
+ pthread_cond_signal(&poll_cb->cond_v);
+ LOGD("done, in mutex");
+ pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_set_state
+ *
+ * DESCRIPTION: set a polling state
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @state : polling state (stopped/polling)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+ mm_camera_poll_task_state_type_t state)
+{
+ poll_cb->state = state;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_proc_pipe
+ *
+ * DESCRIPTION: polling thread routine to process pipe
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+ ssize_t read_len;
+ int i;
+ mm_camera_sig_evt_t cmd_evt;
+ read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+ LOGD("read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+ poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+ switch (cmd_evt.cmd) {
+ case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+ case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
+ /* we always have index 0 for pipe read */
+ poll_cb->num_fds = 0;
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+
+ if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type &&
+ poll_cb->num_fds < MAX_STREAM_NUM_IN_BUNDLE) {
+ if (poll_cb->poll_entries[0].fd >= 0) {
+ /* fd is valid, we update poll_fds */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ }
+ } else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type &&
+ poll_cb->num_fds <= MAX_STREAM_NUM_IN_BUNDLE) {
+ for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if(poll_cb->poll_entries[i].fd >= 0) {
+ /* fd is valid, we update poll_fds to this fd */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ } else {
+ /* fd is invalid, so set the entry to -1 to prevent polling.
+ * According to the spec, poll() will not poll on an entry with fd = -1.
+ * If this is not the case, these invalid fds need to be skipped
+ * when updating this array.
+ * We still keep fd = -1 in this array because it makes it easier to
+ * map the cb associated with this fd once data arrives, by directly
+ * using index - 1 (index 0 is reserved for the pipe read). */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+ poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+ poll_cb->num_fds++;
+ }
+ }
+ }
+ if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+
+ case MM_CAMERA_PIPE_CMD_COMMIT:
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ case MM_CAMERA_PIPE_CMD_EXIT:
+ default:
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_fn
+ *
+ * DESCRIPTION: polling thread routine
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+ int rc = 0, i;
+
+ if (NULL == poll_cb) {
+ LOGE("poll_cb is NULL!\n");
+ return NULL;
+ }
+ LOGD("poll type = %d, num_fd = %d poll_cb = %p\n",
+ poll_cb->poll_type, poll_cb->num_fds,poll_cb);
+ do {
+ for(i = 0; i < poll_cb->num_fds; i++) {
+ poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+ }
+
+ rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
+ if(rc > 0) {
+ if ((poll_cb->poll_fds[0].revents & POLLIN) &&
+ (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
+ /* if we have data on pipe, we only process pipe in this iteration */
+ LOGD("cmd received on pipe\n");
+ mm_camera_poll_proc_pipe(poll_cb);
+ } else {
+ for(i=1; i<poll_cb->num_fds; i++) {
+ /* Checking for ctrl events */
+ if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+ (poll_cb->poll_fds[i].revents & POLLPRI)) {
+ LOGD("mm_camera_evt_notify\n");
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+
+ if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
+ (poll_cb->poll_fds[i].revents & POLLIN) &&
+ (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
+ LOGD("mm_stream_data_notify\n");
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+ }
+ }
+ } else {
+ /* on error, sleep for 10 us (hard-coded) and then continue */
+ usleep(10);
+ continue;
+ }
+ } while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread
+ *
+ * DESCRIPTION: polling thread entry function
+ *
+ * PARAMETERS :
+ * @data : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void *mm_camera_poll_thread(void *data)
+{
+ mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
+
+ mm_camera_cmd_thread_name(poll_cb->threadName);
+ /* add pipe read fd into poll first */
+ poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
+
+ mm_camera_poll_sig_done(poll_cb);
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
+ return mm_camera_poll_fn(poll_cb);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_notify_entries_updated
+ *
+ * DESCRIPTION: notify the polling thread that entries for polling fd have
+ * been updated
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
+{
+ /* send poll entries updated signal to poll thread */
+ return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_commit_updates
+ *
+ * DESCRIPTION: sync with all previously pending async updates
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_commit_updates(mm_camera_poll_thread_t * poll_cb)
+{
+ return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_add_poll_fd
+ *
+ * DESCRIPTION: add a new fd into polling thread
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @handler : stream handle if channel data polling thread,
+ * 0 if event polling thread
+ * @fd : file descriptor need to be added into polling thread
+ * @notify_cb : callback function to handle if any notify from fd
+ * @userdata : user data ptr
+ * @call_type : whether this is a synchronous or asynchronous call
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ int32_t fd,
+ mm_camera_poll_notify_t notify_cb,
+ void* userdata,
+ mm_camera_call_type_t call_type)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if (MAX_STREAM_NUM_IN_BUNDLE > idx) {
+ poll_cb->poll_entries[idx].fd = fd;
+ poll_cb->poll_entries[idx].handler = handler;
+ poll_cb->poll_entries[idx].notify_cb = notify_cb;
+ poll_cb->poll_entries[idx].user_data = userdata;
+ /* send poll entries updated signal to poll thread */
+ if (call_type == mm_camera_sync_call ) {
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+ }
+ } else {
+ LOGE("invalid handler %d (%d)", handler, idx);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_del_poll_fd
+ *
+ * DESCRIPTION: delete a fd from polling thread
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @handler : stream handle if channel data polling thread,
+ * 0 if event polling thread
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ mm_camera_call_type_t call_type)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if ((MAX_STREAM_NUM_IN_BUNDLE > idx) &&
+ (handler == poll_cb->poll_entries[idx].handler)) {
+ /* reset poll entry */
+ poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+ poll_cb->poll_entries[idx].handler = 0;
+ poll_cb->poll_entries[idx].notify_cb = NULL;
+
+ /* send poll entries updated signal to poll thread */
+ if (call_type == mm_camera_sync_call ) {
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+ }
+ } else {
+ if ((MAX_STREAM_NUM_IN_BUNDLE <= idx) ||
+ (poll_cb->poll_entries[idx].handler != 0)) {
+ LOGE("invalid handler %d (%d)", poll_cb->poll_entries[idx].handler,
+ idx);
+ rc = -1;
+ } else {
+ LOGW("invalid handler %d (%d)", handler, idx);
+ rc = 0;
+ }
+ }
+
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+ mm_camera_poll_thread_type_t poll_type)
+{
+ int32_t rc = 0;
+ size_t i = 0, cnt = 0;
+ poll_cb->poll_type = poll_type;
+
+ //Initialize poll_fds
+ cnt = sizeof(poll_cb->poll_fds) / sizeof(poll_cb->poll_fds[0]);
+ for (i = 0; i < cnt; i++) {
+ poll_cb->poll_fds[i].fd = -1;
+ }
+ //Initialize poll_entries
+ cnt = sizeof(poll_cb->poll_entries) / sizeof(poll_cb->poll_entries[0]);
+ for (i = 0; i < cnt; i++) {
+ poll_cb->poll_entries[i].fd = -1;
+ }
+ //Initialize pipe fds
+ poll_cb->pfds[0] = -1;
+ poll_cb->pfds[1] = -1;
+ rc = pipe(poll_cb->pfds);
+ if(rc < 0) {
+ LOGE("pipe open rc=%d\n", rc);
+ return -1;
+ }
+
+ poll_cb->timeoutms = -1; /* wait indefinitely */
+
+ LOGD("poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+ poll_cb->poll_type,
+ poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+ pthread_mutex_init(&poll_cb->mutex, NULL);
+ pthread_cond_init(&poll_cb->cond_v, NULL);
+
+ /* launch the thread */
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = 0;
+ pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+ if(!poll_cb->status) {
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("End");
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+ int32_t rc = 0;
+ if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+ LOGE("err, poll thread is not running.\n");
+ return rc;
+ }
+
+ /* send exit signal to poll thread */
+ mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+ /* wait until poll thread exits */
+ if (pthread_join(poll_cb->pid, NULL) != 0) {
+ LOGD("pthread dead already\n");
+ }
+
+ /* close pipe */
+ if(poll_cb->pfds[0] >= 0) {
+ close(poll_cb->pfds[0]);
+ }
+ if(poll_cb->pfds[1] >= 0) {
+ close(poll_cb->pfds[1]);
+ }
+
+ pthread_mutex_destroy(&poll_cb->mutex);
+ pthread_cond_destroy(&poll_cb->cond_v);
+ memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+ poll_cb->pfds[0] = -1;
+ poll_cb->pfds[1] = -1;
+ return rc;
+}
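+
+/* Illustrative usage sketch (hypothetical caller; names such as stream_hdl,
+ * stream_fd, data_notify_cb and user_data are placeholders, error handling
+ * omitted):
+ *
+ *   mm_camera_poll_thread_t poll_cb;
+ *   memset(&poll_cb, 0, sizeof(poll_cb));
+ *   mm_camera_poll_thread_launch(&poll_cb, MM_CAMERA_POLL_TYPE_DATA);
+ *   mm_camera_poll_thread_add_poll_fd(&poll_cb, stream_hdl, stream_fd,
+ *                                     data_notify_cb, user_data,
+ *                                     mm_camera_sync_call);
+ *   ...
+ *   mm_camera_poll_thread_del_poll_fd(&poll_cb, stream_hdl,
+ *                                     mm_camera_sync_call);
+ *   mm_camera_poll_thread_release(&poll_cb);
+ */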
+
+static void *mm_camera_cmd_thread(void *data)
+{
+ int running = 1;
+ int ret;
+ mm_camera_cmd_thread_t *cmd_thread =
+ (mm_camera_cmd_thread_t *)data;
+ mm_camera_cmdcb_t* node = NULL;
+
+ mm_camera_cmd_thread_name(cmd_thread->threadName);
+ do {
+ do {
+ ret = cam_sem_wait(&cmd_thread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ /* we got notified about new cmd avail in cmd queue */
+ node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+ while (node != NULL) {
+ switch (node->cmd_type) {
+ case MM_CAMERA_CMD_TYPE_EVT_CB:
+ case MM_CAMERA_CMD_TYPE_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
+ case MM_CAMERA_CMD_TYPE_START_ZSL:
+ case MM_CAMERA_CMD_TYPE_STOP_ZSL:
+ case MM_CAMERA_CMD_TYPE_GENERAL:
+ case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
+ if (NULL != cmd_thread->cb) {
+ cmd_thread->cb(node, cmd_thread->user_data);
+ }
+ break;
+ case MM_CAMERA_CMD_TYPE_EXIT:
+ default:
+ running = 0;
+ break;
+ }
+ free(node);
+ node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+ } /* (node != NULL) */
+ } while (running);
+ return NULL;
+}
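+
+/* Illustrative note (sketch only, allocation check omitted): producers hand
+ * work to this thread by enqueuing an mm_camera_cmdcb_t node and posting the
+ * semaphore, as mm_camera_cmd_thread_stop() below does for the exit command:
+ *
+ *   mm_camera_cmdcb_t *node = malloc(sizeof(mm_camera_cmdcb_t));
+ *   memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ *   node->cmd_type = MM_CAMERA_CMD_TYPE_GENERAL;
+ *   cam_queue_enq(&cmd_thread->cmd_queue, node);
+ *   cam_sem_post(&cmd_thread->cmd_sem);
+ */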
+
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+ mm_camera_cmd_cb_t cb,
+ void* user_data)
+{
+ int32_t rc = 0;
+
+ cam_sem_init(&cmd_thread->cmd_sem, 0);
+ cam_sem_init(&cmd_thread->sync_sem, 0);
+ cam_queue_init(&cmd_thread->cmd_queue);
+ cmd_thread->cb = cb;
+ cmd_thread->user_data = user_data;
+ cmd_thread->is_active = TRUE;
+
+ /* launch the thread */
+ pthread_create(&cmd_thread->cmd_pid,
+ NULL,
+ mm_camera_cmd_thread,
+ (void *)cmd_thread);
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_name(const char* name)
+{
+ int32_t rc = 0;
+ /* name the thread */
+ if (name && strlen(name))
+ prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+ return rc;
+}
+
+
+int32_t mm_camera_cmd_thread_stop(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_camera_cmdcb_t");
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+ cam_queue_enq(&cmd_thread->cmd_queue, node);
+ cam_sem_post(&cmd_thread->cmd_sem);
+
+ /* wait until cmd thread exits */
+ if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+ LOGD("pthread dead already\n");
+ }
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_destroy(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ cam_queue_deinit(&cmd_thread->cmd_queue);
+ cam_sem_destroy(&cmd_thread->cmd_sem);
+ cam_sem_destroy(&cmd_thread->sync_sem);
+ memset(cmd_thread, 0, sizeof(mm_camera_cmd_thread_t));
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ rc = mm_camera_cmd_thread_stop(cmd_thread);
+ if (0 == rc) {
+ rc = mm_camera_cmd_thread_destroy(cmd_thread);
+ }
+ return rc;
+}