author    Prateek Chaubey <chaubeyprateek@gmail.com>  2018-01-07 20:55:14 +0530
committer Davide Garberi <dade.garberi@gmail.com>     2018-01-19 14:09:15 +0100
commit    6616278131edd80a12545085e06ee6b0e0a0a788
tree      0aef88ed11809a9d67f6abe4dc2ff782a14737e2 /camera/QCamera2/stack
parent    cc4ccf34871da343111bf68d16ba4e4c67cac1dc
msm8996-common: zuk: Import OSS Camera HAL
Tag: LA.HB.1.3.2-32600-8x96.0
Signed-off-by: Davide Garberi <dade.garberi@gmail.com>
Diffstat (limited to 'camera/QCamera2/stack')
-rw-r--r-- camera/QCamera2/stack/Android.mk 5
-rw-r--r-- camera/QCamera2/stack/common/cam_intf.h 1067
-rw-r--r-- camera/QCamera2/stack/common/cam_list.h 84
-rw-r--r-- camera/QCamera2/stack/common/cam_queue.h 134
-rw-r--r-- camera/QCamera2/stack/common/cam_semaphore.h 88
-rw-r--r-- camera/QCamera2/stack/common/cam_types.h 2753
-rw-r--r-- camera/QCamera2/stack/common/mm_camera_interface.h 923
-rw-r--r-- camera/QCamera2/stack/common/mm_jpeg_interface.h 408
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/Android.mk 63
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h 767
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h 134
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h 76
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c 2397
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c 3639
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c 2052
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c 294
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c 4581
-rw-r--r-- camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c 698
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/Android.mk 193
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h 533
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h 68
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h 38
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h 439
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h 113
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c 2404
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c 291
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c 1933
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c 2047
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c 1313
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c 168
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c 346
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c 349
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c 711
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c 879
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c 695
-rw-r--r-- camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c 258
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/Android.mk 82
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h 539
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h 55
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h 127
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h 105
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h 45
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c 3788
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c 652
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c 409
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c 206
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c 414
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c 186
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c 1185
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c 301
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk 87
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c 776
-rw-r--r-- camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c 479
-rw-r--r-- camera/QCamera2/stack/mm-lib2d-interface/Android.mk 39
-rw-r--r-- camera/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h 209
-rw-r--r-- camera/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c 604
-rw-r--r-- camera/QCamera2/stack/mm-lib2d-interface/test/Android.mk 37
-rw-r--r-- camera/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c 543
58 files changed, 43809 insertions, 0 deletions
diff --git a/camera/QCamera2/stack/Android.mk b/camera/QCamera2/stack/Android.mk
new file mode 100644
index 0000000..a357417
--- /dev/null
+++ b/camera/QCamera2/stack/Android.mk
@@ -0,0 +1,5 @@
+LOCAL_PATH:= $(call my-dir)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/test/Android.mk
+include $(LOCAL_PATH)/mm-camera-test/Android.mk
diff --git a/camera/QCamera2/stack/common/cam_intf.h b/camera/QCamera2/stack/common/cam_intf.h
new file mode 100644
index 0000000..9eb52e9
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_intf.h
@@ -0,0 +1,1067 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+// System dependencies
+#include <string.h>
+#include <media/msmb_isp.h>
+
+// Camera dependencies
+#include "cam_types.h"
+
+#define CAM_PRIV_IOCTL_BASE (V4L2_CID_PRIVATE_BASE + MSM_CAMERA_PRIV_CMD_MAX)
+typedef enum {
+ /* session based parameters */
+ CAM_PRIV_PARM = CAM_PRIV_IOCTL_BASE,
+ /* session based action: do auto focus.*/
+ CAM_PRIV_DO_AUTO_FOCUS,
+ /* session based action: cancel auto focus.*/
+ CAM_PRIV_CANCEL_AUTO_FOCUS,
+ /* session based action: prepare for snapshot.*/
+ CAM_PRIV_PREPARE_SNAPSHOT,
+ /* sync stream info.*/
+ CAM_PRIV_STREAM_INFO_SYNC,
+ /* stream based parameters*/
+ CAM_PRIV_STREAM_PARM,
+ /* start ZSL snapshot.*/
+ CAM_PRIV_START_ZSL_SNAPSHOT,
+ /* stop ZSL snapshot.*/
+ CAM_PRIV_STOP_ZSL_SNAPSHOT,
+ /* event for related sensors synchronization. */
+ CAM_PRIV_SYNC_RELATED_SENSORS,
+ /* flush */
+ CAM_PRIV_FLUSH
+} cam_private_ioctl_enum_t;
+
+typedef enum {
+ /* start syncing for related cameras */
+ CAM_SYNC_RELATED_SENSORS_ON = 1,
+ /* stop syncing for related cameras */
+ CAM_SYNC_RELATED_SENSORS_OFF
+} cam_sync_related_sensors_control_t;
+
+typedef enum {
+ /* Driving camera of the related camera sub-system */
+    /* Certain features are enabled only for the primary camera,
+       such as display mode for preview, autofocus, etc.
+       In certain configurations, e.g. when the optical zoom
+       limit is reached, the Aux camera becomes the driving
+       camera and a role switch takes place. */
+ CAM_MODE_PRIMARY = 0,
+    /* Non-driving camera of the related camera sub-system;
+       no display mode is set for the secondary camera */
+ CAM_MODE_SECONDARY
+} cam_sync_mode_t;
+
+/* Payload for sending bundling info to backend */
+typedef struct {
+ cam_sync_related_sensors_control_t sync_control;
+ cam_sync_type_t type;
+ cam_sync_mode_t mode;
+ /* session Id of the other camera session
+ Linking will be done with this session in the
+ backend */
+ uint32_t related_sensor_session_id;
+ uint8_t is_frame_sync_enabled;
+}cam_sync_related_sensors_event_info_t;
+
+/* Related camera sensor specific calibration data */
+// Align bytes according to API document.
+#pragma pack(2)
+typedef struct {
+ /* Focal length in pixels @ calibration resolution.*/
+ float normalized_focal_length;
+ /* Native sensor resolution W that was used to capture calibration image */
+ uint16_t native_sensor_resolution_width;
+ /* Native sensor resolution H that was used to capture calibration image */
+ uint16_t native_sensor_resolution_height;
+ /* Image size W used internally by calibration tool */
+ uint16_t calibration_sensor_resolution_width;
+ /* Image size H used internally by calibration tool */
+ uint16_t calibration_sensor_resolution_height;
+ /* Focal length ratio @ Calibration */
+ float focal_length_ratio;
+}cam_related_sensor_calibration_data_t;
+#pragma pack()
+
+/* Related Camera System Calibration data.
+   Calibration data for the entire related camera sub-system is
+   stored in a shared EEPROM. There are two per-sensor fields,
+   followed by a set of calibration data common to the entire
+   related camera system. */
+// Align bytes according to API document.
+#pragma pack(2)
+typedef struct {
+ /* Version information */
+ uint32_t calibration_format_version;
+ /* Main Camera Sensor specific calibration */
+ cam_related_sensor_calibration_data_t main_cam_specific_calibration;
+ /* Aux Camera Sensor specific calibration */
+ cam_related_sensor_calibration_data_t aux_cam_specific_calibration;
+ /* Relative viewpoint matching matrix w.r.t Main */
+ float relative_rotation_matrix[RELCAM_CALIB_ROT_MATRIX_MAX];
+ /* Relative geometric surface description parameters */
+ float relative_geometric_surface_parameters[
+ RELCAM_CALIB_SURFACE_PARMS_MAX];
+ /* Relative offset of sensor center from optical axis along horizontal dimension */
+ float relative_principle_point_x_offset;
+ /* Relative offset of sensor center from optical axis along vertical dimension */
+ float relative_principle_point_y_offset;
+ /* 0=Main Camera is on the left of Aux; 1=Main Camera is on the right of Aux */
+ uint16_t relative_position_flag;
+ /* Camera separation in mm */
+ float relative_baseline_distance;
+ /* main sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+ uint16_t main_sensor_mirror_flip_setting;
+ /* aux sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+ uint16_t aux_sensor_mirror_flip_setting;
+ /* module orientation during cal: 0-sensors in landscape, 1-sensors in portrait */
+ uint16_t module_orientation_during_calibration;
+ /* cal images required rotation: 0-no, 1-90 degrees right, 2-90 degrees left */
+ uint16_t rotation_flag;
+ /* AEC sync OTP data */
+    /* AEC sync brightness ratio. Fixed Point Q10 */
+ int16_t brightness_ratio;
+ /* Reference mono gain value obtained from setup stage and used during calibration stage */
+ /* Fixed Point Q10 */
+ int16_t ref_mono_gain;
+ /* Reference mono line count obtained from setup stage and used during calibration stage */
+ uint16_t ref_mono_linecount;
+ /* Reference bayer gain value obtained from setup stage and used during calibration stage */
+ /* Fixed Point Q10 */
+ int16_t ref_bayer_gain;
+ /* Reference bayer line count obtained from setup stage and used during calibration stage */
+ uint16_t ref_bayer_linecount;
+ /* Reference bayer color temperature */
+ uint16_t ref_bayer_color_temperature;
+ /* Reserved for future use */
+ float reserved[RELCAM_CALIB_RESERVED_MAX];
+} cam_related_system_calibration_data_t;
+#pragma pack()
+
+typedef struct {
+ uint32_t default_sensor_flip;
+ uint32_t sensor_mount_angle;
+ cam_related_system_calibration_data_t otp_calibration_data;
+} cam_jpeg_metadata_t;
+
+/* capability struct definition for HAL 1*/
+typedef struct{
+ cam_hal_version_t version;
+
+ cam_position_t position; /* sensor position: front, back */
+
+ uint8_t auto_hdr_supported;
+
+ uint16_t isWnrSupported;
+ /* supported iso modes */
+ size_t supported_iso_modes_cnt;
+ cam_iso_mode_type supported_iso_modes[CAM_ISO_MODE_MAX];
+
+ /* supported flash modes */
+ size_t supported_flash_modes_cnt;
+ cam_flash_mode_t supported_flash_modes[CAM_FLASH_MODE_MAX];
+
+ size_t zoom_ratio_tbl_cnt; /* table size for zoom ratios */
+ uint32_t zoom_ratio_tbl[MAX_ZOOMS_CNT]; /* zoom ratios table */
+
+ /* supported effect modes */
+ size_t supported_effects_cnt;
+ cam_effect_mode_type supported_effects[CAM_EFFECT_MODE_MAX];
+
+ /* supported scene modes */
+ size_t supported_scene_modes_cnt;
+ cam_scene_mode_type supported_scene_modes[CAM_SCENE_MODE_MAX];
+
+ /* supported auto exposure modes */
+ size_t supported_aec_modes_cnt;
+ cam_auto_exposure_mode_type supported_aec_modes[CAM_AEC_MODE_MAX];
+
+ size_t fps_ranges_tbl_cnt; /* fps ranges table size */
+ cam_fps_range_t fps_ranges_tbl[MAX_SIZES_CNT]; /* fps ranges table */
+
+ /* supported antibanding modes */
+ size_t supported_antibandings_cnt;
+ cam_antibanding_mode_type supported_antibandings[CAM_ANTIBANDING_MODE_MAX];
+
+ /* supported white balance modes */
+ size_t supported_white_balances_cnt;
+ cam_wb_mode_type supported_white_balances[CAM_WB_MODE_MAX];
+
+ /* Capability list of supported insensor HDR types
+ * Backend is expected to fill in all the supported types and set appropriate
+ * count, see cam_sensor_hdr_type_t for valid types
+ */
+ size_t supported_sensor_hdr_types_cnt;
+ cam_sensor_hdr_type_t supported_sensor_hdr_types[CAM_SENSOR_HDR_MAX];
+
+ /* supported manual wb cct */
+ int32_t min_wb_cct;
+ int32_t max_wb_cct;
+
+ /* supported manual wb rgb gains */
+ float min_wb_gain;
+ float max_wb_gain;
+
+ /* supported focus modes */
+ size_t supported_focus_modes_cnt;
+ cam_focus_mode_type supported_focus_modes[CAM_FOCUS_MODE_MAX];
+
+ /* supported manual focus position */
+ float min_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+ float max_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+
+ int32_t exposure_compensation_min; /* min value of exposure compensation index */
+ int32_t exposure_compensation_max; /* max value of exposure compensation index */
+ int32_t exposure_compensation_default; /* default value of exposure compensation index */
+ float exposure_compensation_step;
+ cam_rational_type_t exp_compensation_step; /* exposure compensation step value */
+
+    uint8_t video_stablization_supported; /* flag if video stabilization is supported */
+
+ size_t picture_sizes_tbl_cnt; /* picture sizes table size */
+ cam_dimension_t picture_sizes_tbl[MAX_SIZES_CNT]; /* picture sizes table */
+ /* The minimum frame duration that is supported for each
+ * resolution in availableProcessedSizes. Should correspond
+ * to the frame duration when only that processed stream
+ * is active, with all processing set to FAST */
+ int64_t picture_min_duration[MAX_SIZES_CNT];
+
+ /* capabilities specific to HAL 1 */
+
+ int32_t modes_supported; /* mask of modes supported: 2D, 3D */
+ uint32_t sensor_mount_angle; /* sensor mount angle */
+
+ float focal_length; /* focal length */
+ float hor_view_angle; /* horizontal view angle */
+ float ver_view_angle; /* vertical view angle */
+
+ size_t preview_sizes_tbl_cnt; /* preview sizes table size */
+    cam_dimension_t preview_sizes_tbl[MAX_SIZES_CNT]; /* preview sizes table */
+
+ size_t video_sizes_tbl_cnt; /* video sizes table size */
+ cam_dimension_t video_sizes_tbl[MAX_SIZES_CNT]; /* video sizes table */
+
+
+ size_t livesnapshot_sizes_tbl_cnt; /* livesnapshot sizes table size */
+ cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT]; /* livesnapshot sizes table */
+
+ size_t vhdr_livesnapshot_sizes_tbl_cnt; /* vhdr_livesnapshot sizes table size */
+ cam_dimension_t vhdr_livesnapshot_sizes_tbl[MAX_SIZES_CNT]; /* vhdr_livesnapshot sizes table */
+
+ size_t hfr_tbl_cnt; /* table size for HFR */
+ cam_hfr_info_t hfr_tbl[CAM_HFR_MODE_MAX]; /* HFR table */
+
+ size_t zzhdr_sizes_tbl_cnt; /* Number of resolutions in zzHDR mode*/
+ cam_dimension_t zzhdr_sizes_tbl[MAX_SIZES_CNT]; /* Table for ZZHDR supported sizes */
+
+ size_t supported_quadra_cfa_dim_cnt; /* Number of resolutions in Quadra CFA mode */
+ cam_dimension_t quadra_cfa_dim[MAX_SIZES_CNT]; /* Table for Quadra CFA supported sizes */
+ cam_format_t quadra_cfa_format; /* Quadra CFA output format */
+ uint32_t is_remosaic_lib_present; /* Flag indicating if remosaic lib present */
+
+ /* supported preview formats */
+ size_t supported_preview_fmt_cnt;
+ cam_format_t supported_preview_fmts[CAM_FORMAT_MAX];
+
+ /* supported picture formats */
+ size_t supported_picture_fmt_cnt;
+ cam_format_t supported_picture_fmts[CAM_FORMAT_MAX];
+
+ uint8_t max_downscale_factor;
+
+ /* dimension and supported output format of raw dump from camif */
+ size_t supported_raw_dim_cnt;
+ cam_dimension_t raw_dim[MAX_SIZES_CNT];
+ size_t supported_raw_fmt_cnt;
+ cam_format_t supported_raw_fmts[CAM_FORMAT_MAX];
+ /* The minimum frame duration that is supported for above
+ raw resolution */
+ int64_t raw_min_duration[MAX_SIZES_CNT];
+
+ /* 3A version*/
+ cam_q3a_version_t q3a_version;
+ /* supported focus algorithms */
+ size_t supported_focus_algos_cnt;
+ cam_focus_algorithm_type supported_focus_algos[CAM_FOCUS_ALGO_MAX];
+
+
+ uint8_t auto_wb_lock_supported; /* flag if auto white balance lock is supported */
+ uint8_t zoom_supported; /* flag if zoom is supported */
+ uint8_t smooth_zoom_supported; /* flag if smooth zoom is supported */
+ uint8_t auto_exposure_lock_supported; /* flag if auto exposure lock is supported */
+ uint8_t video_snapshot_supported; /* flag if video snapshot is supported */
+
+    uint8_t max_num_roi; /* max number of ROIs that can be detected */
+    uint8_t max_num_focus_areas; /* max num of focus areas */
+    uint8_t max_num_metering_areas; /* max num of metering areas */
+ uint8_t max_zoom_step; /* max zoom step value */
+
+ /* QCOM specific control */
+ cam_control_range_t brightness_ctrl; /* brightness */
+ cam_control_range_t sharpness_ctrl; /* sharpness */
+ cam_control_range_t contrast_ctrl; /* contrast */
+ cam_control_range_t saturation_ctrl; /* saturation */
+ cam_control_range_t sce_ctrl; /* skintone enhancement factor */
+
+ /* QCOM HDR specific control. Indicates number of frames and exposure needs for the frames */
+ cam_hdr_bracketing_info_t hdr_bracketing_setting;
+
+ cam_feature_mask_t qcom_supported_feature_mask; /* mask of qcom specific features supported:
+ * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
+ cam_padding_info_t padding_info; /* padding information from PP */
+ uint32_t min_num_pp_bufs; /* minimum number of buffers needed by postproc module */
+ cam_format_t rdi_mode_stream_fmt; /* stream format supported in rdi mode */
+
+ /* capabilities specific to HAL 3 */
+
+ float min_focus_distance;
+ float hyper_focal_distance;
+
+ float focal_lengths[CAM_FOCAL_LENGTHS_MAX];
+ uint8_t focal_lengths_count;
+
+ /* Needs to be regular f number instead of APEX */
+ float apertures[CAM_APERTURES_MAX];
+ uint8_t apertures_count;
+
+ float filter_densities[CAM_FILTER_DENSITIES_MAX];
+ uint8_t filter_densities_count;
+
+ uint8_t optical_stab_modes[CAM_OPT_STAB_MAX];
+ uint8_t optical_stab_modes_count;
+
+ cam_dimension_t lens_shading_map_size;
+
+ cam_dimension_t geo_correction_map_size;
+ float geo_correction_map[2 * 3 * CAM_MAX_MAP_WIDTH *
+ CAM_MAX_MAP_HEIGHT];
+
+ float lens_position[3];
+
+ /* nano seconds */
+ int64_t exposure_time_range[EXPOSURE_TIME_RANGE_CNT];
+
+ /* nano seconds */
+ int64_t max_frame_duration;
+
+ cam_color_filter_arrangement_t color_arrangement;
+ uint8_t num_color_channels;
+
+ /* parameters required to calculate S and O co-efficients */
+ double gradient_S;
+ double offset_S;
+ double gradient_O;
+ double offset_O;
+
+ float sensor_physical_size[SENSOR_PHYSICAL_SIZE_CNT];
+
+ /* Dimensions of full pixel array, possibly including
+ black calibration pixels */
+ cam_dimension_t pixel_array_size;
+ /* Area of raw data which corresponds to only active
+ pixels; smaller or equal to pixelArraySize. */
+ cam_rect_t active_array_size;
+
+ /* Maximum raw value output by sensor */
+ int32_t white_level;
+
+ /* A fixed black level offset for each of the Bayer
+ mosaic channels */
+ int32_t black_level_pattern[BLACK_LEVEL_PATTERN_CNT];
+
+ /* Time taken before flash can fire again in nano secs */
+ int64_t flash_charge_duration;
+
+ /* flash firing power */
+ size_t supported_flash_firing_level_cnt;
+ cam_format_t supported_firing_levels[CAM_FLASH_FIRING_LEVEL_MAX];
+
+ /* Flash Firing Time */
+ int64_t flash_firing_time;
+
+    /* Flash Color Temperature */
+ uint8_t flash_color_temp;
+
+ /* Flash max Energy */
+ uint8_t flash_max_energy;
+
+ /* Maximum number of supported points in the tonemap
+ curve */
+ int32_t max_tone_map_curve_points;
+
+ /* supported formats */
+ size_t supported_scalar_format_cnt;
+ cam_format_t supported_scalar_fmts[CAM_FORMAT_MAX];
+
+ uint32_t max_face_detection_count;
+
+ uint8_t histogram_supported;
+ /* Number of histogram buckets supported */
+ int32_t histogram_size;
+ /* Maximum value possible for a histogram bucket */
+ int32_t max_histogram_count;
+
+ cam_dimension_t sharpness_map_size;
+
+ /* Maximum value possible for a sharpness map region */
+ int32_t max_sharpness_map_value;
+
+ /*Autoexposure modes for camera 3 api*/
+ size_t supported_ae_modes_cnt;
+ cam_ae_mode_type supported_ae_modes[CAM_AE_MODE_MAX];
+
+
+ cam_sensitivity_range_t sensitivity_range;
+ int32_t max_analog_sensitivity;
+
+ /* ISP digital gain */
+ cam_sensitivity_range_t isp_sensitivity_range;
+
+ /* picture sizes need scale*/
+ cam_scene_mode_overrides_t scene_mode_overrides[CAM_SCENE_MODE_MAX];
+ size_t scale_picture_sizes_cnt;
+ cam_dimension_t scale_picture_sizes[MAX_SCALE_SIZES_CNT];
+
+ uint8_t flash_available;
+
+ cam_rational_type_t base_gain_factor; /* sensor base gain factor */
+ /* AF Bracketing info */
+ cam_af_bracketing_t ubifocus_af_bracketing_need;
+ cam_af_bracketing_t refocus_af_bracketing_need;
+ /* opti Zoom info */
+ cam_opti_zoom_t opti_zoom_settings_need;
+ /* still more info */
+ cam_still_more_t stillmore_settings_need;
+ /* chroma flash info */
+ cam_chroma_flash_t chroma_flash_settings_need;
+
+ cam_rational_type_t forward_matrix[3][3];
+ cam_rational_type_t color_transform[3][3];
+
+ uint8_t focus_dist_calibrated;
+ uint8_t supported_test_pattern_modes_cnt;
+ cam_test_pattern_mode_t supported_test_pattern_modes[MAX_TEST_PATTERN_CNT];
+
+ int64_t stall_durations[MAX_SIZES_CNT];
+
+ cam_illuminat_t reference_illuminant1;
+ cam_illuminat_t reference_illuminant2;
+
+ int64_t jpeg_stall_durations[MAX_SIZES_CNT];
+ int64_t raw16_stall_durations[MAX_SIZES_CNT];
+ cam_rational_type_t forward_matrix1[FORWARD_MATRIX_ROWS][FORWARD_MATRIX_COLS];
+ cam_rational_type_t forward_matrix2[FORWARD_MATRIX_ROWS][FORWARD_MATRIX_COLS];
+ cam_rational_type_t color_transform1[COLOR_TRANSFORM_ROWS][COLOR_TRANSFORM_COLS];
+ cam_rational_type_t color_transform2[COLOR_TRANSFORM_ROWS][COLOR_TRANSFORM_COLS];
+ cam_rational_type_t calibration_transform1[CAL_TRANSFORM_ROWS][CAL_TRANSFORM_COLS];
+ cam_rational_type_t calibration_transform2[CAL_TRANSFORM_ROWS][CAL_TRANSFORM_COLS];
+ uint16_t isCacSupported;
+
+ cam_opaque_raw_format_t opaque_raw_fmt;
+
+ /* true Portrait info */
+ cam_true_portrait_t true_portrait_settings_need;
+
+ /* Sensor type information */
+ cam_sensor_type_t sensor_type;
+
+ cam_aberration_mode_t aberration_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
+ uint32_t aberration_modes_count;
+
+ /* Can the sensor timestamp be compared to
+ * timestamps from other sub-systems (gyro, accelerometer etc.) */
+ uint8_t isTimestampCalibrated;
+
+ /* Max size supported by ISP viewfinder path */
+ cam_dimension_t max_viewfinder_size;
+
+ /* Analysis buffer requirements */
+ cam_analysis_info_t analysis_info[CAM_ANALYSIS_INFO_MAX];
+
+ /* This is set to 'true' if sensor cannot guarantee per frame control */
+ /* Default value of this capability is 'false' indicating per-frame */
+ /* control is supported */
+ uint8_t no_per_frame_control_support;
+
+ /* EIS information */
+ uint8_t supported_is_types_cnt;
+ uint32_t supported_is_types[IS_TYPE_MAX];
+ /*for each type, specify the margin needed. Margin will be
+ the decimal representation of a percentage
+ ex: 10% margin = 0.1 */
+ float supported_is_type_margins[IS_TYPE_MAX];
+
+ /* Max cpp batch size */
+ uint8_t max_batch_bufs_supported;
+ uint32_t buf_alignment;
+ uint32_t min_stride;
+ uint32_t min_scanline;
+ uint8_t flash_dev_name[QCAMERA_MAX_FILEPATH_LENGTH];
+ uint8_t eeprom_version_info[MAX_EEPROM_VERSION_INFO_LEN];
+
+ /* maximum pixel bandwidth shared between cameras */
+ uint64_t max_pixel_bandwidth;
+
+ /* Array of K integers, where K%4==0,
+ as a list of rectangles in the pixelArray co-ord system
+ left, top, right, bottom */
+ int32_t optical_black_regions[MAX_OPTICAL_BLACK_REGIONS * 4];
+ /* Count is K/4 */
+ uint8_t optical_black_region_count;
+
+ /* hot pixel */
+ uint8_t hotPixel_mode;
+ uint32_t hotPixel_count;
+ cam_coordinate_type_t hotPixelMap[512];
+
+ /* supported instant capture/AEC convergence modes */
+ size_t supported_instant_aec_modes_cnt;
+ cam_aec_convergence_type supported_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
+
+ /* Dual cam calibration data */
+ cam_related_system_calibration_data_t related_cam_calibration;
+} cam_capability_t;
+
+typedef enum {
+ CAM_STREAM_PARAM_TYPE_DO_REPROCESS = CAM_INTF_PARM_DO_REPROCESS,
+ CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO = CAM_INTF_PARM_SET_BUNDLE,
+ CAM_STREAM_PARAM_TYPE_SET_FLIP = CAM_INTF_PARM_STREAM_FLIP,
+ CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP = CAM_INTF_PARM_GET_OUTPUT_CROP,
+ CAM_STREAM_PARAM_TYPE_GET_IMG_PROP = CAM_INTF_PARM_GET_IMG_PROP,
+ CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES = CAM_INTF_PARM_REQUEST_FRAMES,
+ CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE = CAM_INTF_PARM_REQUEST_OPS_MODE,
+ CAM_STREAM_PARAM_TYPE_MAX
+} cam_stream_param_type_e;
+
+typedef struct {
+ uint32_t buf_index; /* buf index to the source frame buffer that needs reprocess,
+ (assume buffer is already mapped)*/
+ uint32_t frame_idx; /* frame id of source frame to be reprocessed */
+ int32_t ret_val; /* return value from reprocess. Could have different meanings.
+ i.e., faceID in the case of face registration. */
+ uint8_t meta_present; /* if there is meta data associated with this reprocess frame */
+ uint32_t meta_stream_handle; /* meta data stream ID. only valid if meta_present != 0 */
+ uint32_t meta_buf_index; /* buf index to meta data buffer. only valid if meta_present != 0 */
+
+ /* opaque metadata required for reprocessing */
+ int32_t private_data[MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES];
+ cam_rect_t crop_rect;
+} cam_reprocess_param;
+
+typedef struct {
+ uint32_t flip_mask;
+} cam_flip_mode_t;
+
+#define IMG_NAME_SIZE 32
+typedef struct {
+ cam_rect_t crop; /* crop info for the image */
+ cam_dimension_t input; /* input dimension of the image */
+ cam_dimension_t output; /* output dimension of the image */
+ char name[IMG_NAME_SIZE]; /* optional name of the ext*/
+ cam_format_t format; /* image format */
+} cam_stream_img_prop_t;
+
+typedef struct {
+    uint8_t enableStream; /* 0 = stop, 1 = start */
+} cam_request_frames;
+
+typedef struct {
+ cam_stream_param_type_e type;
+ union {
+ cam_reprocess_param reprocess; /* do reprocess */
+ cam_bundle_config_t bundleInfo; /* set bundle info*/
+ cam_flip_mode_t flipInfo; /* flip mode */
+ cam_crop_data_t outputCrop; /* output crop for current frame */
+ cam_stream_img_prop_t imgProp; /* image properties of current frame */
+ cam_request_frames frameRequest; /*do TNR process*/
+ cam_perf_mode_t perf_mode; /*request operational mode*/
+ };
+} cam_stream_parm_buffer_t;
+
+/* stream info */
+typedef struct {
+ /* stream ID from server */
+ uint32_t stream_svr_id;
+
+ /* stream type */
+ cam_stream_type_t stream_type;
+
+ /* image format */
+ cam_format_t fmt;
+
+ /* image dimension */
+ cam_dimension_t dim;
+
+ /* buffer plane information, will be calc based on stream_type, fmt,
+ dim, and padding_info(from stream config). Info including:
+ offset_x, offset_y, stride, scanline, plane offset */
+ cam_stream_buf_plane_info_t buf_planes;
+
+ /* number of stream bufs will be allocated */
+ uint32_t num_bufs;
+
+ /* streaming type */
+ cam_streaming_mode_t streaming_mode;
+
+ /* num of frames needs to be generated.
+ * only valid when streaming_mode = CAM_STREAMING_MODE_BURST */
+ uint8_t num_of_burst;
+
+ /* num of frames in one batch.
+ * only valid when streaming_mode = CAM_STREAMING_MODE_BATCH */
+ cam_stream_user_buf_info_t user_buf_info;
+
+ /* stream specific pp config */
+ cam_pp_feature_config_t pp_config;
+
+ /* this section is valid if offline reprocess type stream */
+ cam_stream_reproc_config_t reprocess_config;
+
+ cam_stream_parm_buffer_t parm_buf; /* stream based parameters */
+
+ uint8_t dis_enable;
+
+ /* Image Stabilization type */
+ cam_is_type_t is_type;
+
+ /* Signifies Secure stream mode */
+ cam_stream_secure_t is_secure;
+
+ /* Preferred Performance mode */
+ cam_perf_mode_t perf_mode;
+
+ /* if frames will not be received */
+ uint8_t noFrameExpected;
+} cam_stream_info_t;
+
+/*****************************************************************************
+ * Code for Domain Socket Based Parameters *
+ ****************************************************************************/
+#define INCLUDE(PARAM_ID,DATATYPE,COUNT) \
+ DATATYPE member_variable_##PARAM_ID[ COUNT ]
+
+#define POINTER_OF_META(META_ID, TABLE_PTR) \
+ ((NULL != TABLE_PTR) ? \
+ (&TABLE_PTR->data.member_variable_##META_ID[ 0 ]) : (NULL))
+
+#define SIZE_OF_PARAM(META_ID, TABLE_PTR) \
+ sizeof(TABLE_PTR->data.member_variable_##META_ID)
+
+#define IF_META_AVAILABLE(META_TYPE, META_PTR_NAME, META_ID, TABLE_PTR) \
+ META_TYPE *META_PTR_NAME = \
+ (((NULL != TABLE_PTR) && (TABLE_PTR->is_valid[META_ID])) ? \
+ (&TABLE_PTR->data.member_variable_##META_ID[ 0 ]) : \
+ (NULL)); \
+ if (NULL != META_PTR_NAME) \
+
+#define ADD_SET_PARAM_ENTRY_TO_BATCH(TABLE_PTR, META_ID, DATA) \
+ ((NULL != TABLE_PTR) ? \
+ ((TABLE_PTR->data.member_variable_##META_ID[ 0 ] = DATA), \
+ (TABLE_PTR->is_valid[META_ID] = 1), (0)) : \
+ ((LOGE("Unable to set metadata TABLE_PTR:%p META_ID:%d", \
+ TABLE_PTR, META_ID)), (-1))) \
+
+#define ADD_SET_PARAM_ARRAY_TO_BATCH(TABLE_PTR, META_ID, PDATA, COUNT, RCOUNT) \
+{ \
+ if ((NULL != TABLE_PTR) && \
+ (0 < COUNT) && \
+ ((sizeof(TABLE_PTR->data.member_variable_##META_ID) / \
+ sizeof(TABLE_PTR->data.member_variable_##META_ID[ 0 ])) \
+ >= COUNT)) { \
+ for (size_t _i = 0; _i < COUNT ; _i++) { \
+ TABLE_PTR->data.member_variable_##META_ID[ _i ] = PDATA [ _i ]; \
+ } \
+ TABLE_PTR->is_valid[META_ID] = 1; \
+ RCOUNT = COUNT; \
+ } else { \
+ LOGE("Unable to set metadata TABLE_PTR:%p META_ID:%d COUNT:%zu", \
+ TABLE_PTR, META_ID, COUNT); \
+ RCOUNT = 0; \
+ } \
+}
+
+#define ADD_GET_PARAM_ENTRY_TO_BATCH(TABLE_PTR, META_ID) \
+{ \
+ if (NULL != TABLE_PTR) { \
+ TABLE_PTR->is_reqd[META_ID] = 1; \
+ } else { \
+ LOGE("Unable to get metadata TABLE_PTR:%p META_ID:%d", \
+ TABLE_PTR, META_ID); \
+ } \
+}
+
+#define READ_PARAM_ENTRY(TABLE_PTR, META_ID, DATA) \
+{ \
+ if (NULL != TABLE_PTR) { \
+ DATA = TABLE_PTR->data.member_variable_##META_ID[ 0 ]; \
+ } else { \
+ LOGE("Unable to read metadata TABLE_PTR:%p META_ID:%d", \
+ TABLE_PTR, META_ID); \
+ } \
+}
+
+/************************************
+* Custom parameter data definition
+*************************************/
+typedef struct {
+ /*CAM_CUSTOM_PARM_EXAMPLE is added to explain custom param procedure*/
+ INCLUDE(CAM_CUSTOM_PARM_EXAMPLE, int32_t, 1);
+} custom_parm_data_t;
+
+/************************************
+* Custom Parameter buffer definition
+*************************************/
+typedef struct {
+ union {
+ /* Hash table of 'is valid' flags */
+ uint8_t is_valid[CAM_CUSTOM_PARM_MAX];
+
+ /* Hash table of 'is required' flags for the GET PARAM */
+ uint8_t is_reqd[CAM_CUSTOM_PARM_MAX];
+ };
+ custom_parm_data_t data;
+} custom_parm_buffer_t;
+
+
+typedef struct {
+/**************************************************************************************
+ * ID from (cam_intf_metadata_type_t) DATATYPE COUNT
+ **************************************************************************************/
+ /* common between HAL1 and HAL3 */
+ INCLUDE(CAM_INTF_META_HISTOGRAM, cam_hist_stats_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_DETECTION, cam_face_detection_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_RECOG, cam_face_recog_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_BLINK, cam_face_blink_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_GAZE, cam_face_gaze_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_SMILE, cam_face_smile_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_LANDMARK, cam_face_landmarks_data_t, 1);
+ INCLUDE(CAM_INTF_META_FACE_CONTOUR, cam_face_contour_data_t, 1);
+ INCLUDE(CAM_INTF_META_AUTOFOCUS_DATA, cam_auto_focus_data_t, 1);
+ INCLUDE(CAM_INTF_META_CDS_DATA, cam_cds_data_t, 1);
+ INCLUDE(CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, uint32_t, 1);
+
+ /* Specific to HAl1 */
+ INCLUDE(CAM_INTF_META_CROP_DATA, cam_crop_data_t, 1);
+ INCLUDE(CAM_INTF_META_PREP_SNAPSHOT_DONE, int32_t, 1);
+ INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE, cam_frame_idx_range_t, 1);
+ INCLUDE(CAM_INTF_META_ASD_HDR_SCENE_DATA, cam_asd_hdr_scene_data_t, 1);
+ INCLUDE(CAM_INTF_META_ASD_SCENE_INFO, cam_asd_decision_t, 1);
+ INCLUDE(CAM_INTF_META_CURRENT_SCENE, cam_scene_mode_type, 1);
+ INCLUDE(CAM_INTF_META_AWB_INFO, cam_awb_params_t, 1);
+ INCLUDE(CAM_INTF_META_FOCUS_POSITION, cam_focus_pos_info_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_ISP, cam_chromatix_lite_isp_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_PP, cam_chromatix_lite_pp_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AE, cam_chromatix_lite_ae_stats_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AWB, cam_chromatix_lite_awb_stats_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AF, cam_chromatix_lite_af_stats_t, 1);
+ INCLUDE(CAM_INTF_META_CHROMATIX_LITE_ASD, cam_chromatix_lite_asd_stats_t, 1);
+ INCLUDE(CAM_INTF_BUF_DIVERT_INFO, cam_buf_divert_info_t, 1);
+
+ /* Specific to HAL3 */
+ INCLUDE(CAM_INTF_META_FRAME_NUMBER_VALID, int32_t, 1);
+ INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, int32_t, 1);
+ INCLUDE(CAM_INTF_META_FRAME_DROPPED, cam_frame_dropped_t, 1);
+ INCLUDE(CAM_INTF_META_FRAME_NUMBER, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, cam_color_correct_matrix_t, 1);
+ INCLUDE(CAM_INTF_META_COLOR_CORRECT_GAINS, cam_color_correct_gains_t, 1);
+ INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, cam_color_correct_matrix_t, 1);
+ INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, cam_color_correct_gains_t, 1);
+ INCLUDE(CAM_INTF_META_AEC_ROI, cam_area_t, 1);
+ INCLUDE(CAM_INTF_META_AEC_STATE, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_FOCUS_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_MANUAL_FOCUS_POS, cam_manual_focus_parm_t, 1);
+ INCLUDE(CAM_INTF_META_AF_ROI, cam_area_t, 1);
+ INCLUDE(CAM_INTF_META_AF_STATE, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_WHITE_BALANCE, int32_t, 1);
+ INCLUDE(CAM_INTF_META_AWB_REGIONS, cam_area_t, 1);
+ INCLUDE(CAM_INTF_META_AWB_STATE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_BLACK_LEVEL_LOCK, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_EDGE_MODE, cam_edge_application_t, 1);
+ INCLUDE(CAM_INTF_META_FLASH_POWER, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_FLASH_FIRING_TIME, int64_t, 1);
+ INCLUDE(CAM_INTF_META_FLASH_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_FLASH_STATE, int32_t, 1);
+ INCLUDE(CAM_INTF_META_HOTPIXEL_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_LENS_APERTURE, float, 1);
+ INCLUDE(CAM_INTF_META_LENS_FILTERDENSITY, float, 1);
+ INCLUDE(CAM_INTF_META_LENS_FOCAL_LENGTH, float, 1);
+ INCLUDE(CAM_INTF_META_LENS_FOCUS_DISTANCE, float, 1);
+ INCLUDE(CAM_INTF_META_LENS_FOCUS_RANGE, float, 2);
+ INCLUDE(CAM_INTF_META_LENS_STATE, cam_af_lens_state_t, 1);
+ INCLUDE(CAM_INTF_META_LENS_OPT_STAB_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_VIDEO_STAB_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_LENS_FOCUS_STATE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_NOISE_REDUCTION_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_SCALER_CROP_REGION, cam_crop_region_t, 1);
+ INCLUDE(CAM_INTF_META_SCENE_FLICKER, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, int64_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_FRAME_DURATION, int64_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_SENSITIVITY, int32_t, 1);
+ INCLUDE(CAM_INTF_META_ISP_SENSITIVITY , int32_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_TIMESTAMP, int64_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, int64_t, 1);
+ INCLUDE(CAM_INTF_META_SHADING_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_STATS_FACEDETECT_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_STATS_HISTOGRAM_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP, cam_sharpness_map_t, 3);
+ INCLUDE(CAM_INTF_META_TONEMAP_CURVES, cam_rgb_tonemap_curves, 1);
+ INCLUDE(CAM_INTF_META_LENS_SHADING_MAP, cam_lens_shading_map_t, 1);
+ INCLUDE(CAM_INTF_META_AEC_INFO, cam_3a_params_t, 1);
+ INCLUDE(CAM_INTF_META_SENSOR_INFO, cam_sensor_params_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_AE, cam_ae_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_AWB, cam_awb_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_AF, cam_af_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_ASD, cam_asd_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_STATS, cam_stats_buffer_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_BESTATS, cam_bestats_buffer_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_BHIST, cam_bhist_buffer_exif_debug_t, 1);
+ INCLUDE(CAM_INTF_META_EXIF_DEBUG_3A_TUNING, cam_q3a_tuning_info_t, 1);
+ INCLUDE(CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE, cam_auto_scene_t, 1);
+ INCLUDE(CAM_INTF_PARM_EFFECT, uint32_t, 1);
+ /* Defining as int32_t so that this array is 4 byte aligned */
+ INCLUDE(CAM_INTF_META_PRIVATE_DATA, int32_t,
+ MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / 4);
+
+ /* Following are Params only and not metadata currently */
+ INCLUDE(CAM_INTF_PARM_HAL_VERSION, int32_t, 1);
+ /* Shared between HAL1 and HAL3 */
+ INCLUDE(CAM_INTF_PARM_ANTIBANDING, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_EV_STEP, cam_rational_type_t, 1);
+ INCLUDE(CAM_INTF_PARM_AEC_LOCK, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_FPS_RANGE, cam_fps_range_t, 1);
+ INCLUDE(CAM_INTF_PARM_AWB_LOCK, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_BESTSHOT_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_DIS_ENABLE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_LED_MODE, int32_t, 1);
+ INCLUDE(CAM_INTF_META_LED_MODE_OVERRIDE, uint32_t, 1);
+
+ /* dual camera specific params */
+ INCLUDE(CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION, cam_related_system_calibration_data_t, 1);
+ INCLUDE(CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, cam_focal_length_ratio_t, 1);
+ INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_SENSOR, cam_stream_crop_info_t, 1);
+ INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_CAMIF, cam_stream_crop_info_t, 1);
+ INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_ISP, cam_stream_crop_info_t, 1);
+ INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_CPP, cam_stream_crop_info_t, 1);
+ INCLUDE(CAM_INTF_META_DCRF, cam_dcrf_result_t, 1);
+
+ /* HAL1 specific */
+ /* read only */
+ INCLUDE(CAM_INTF_PARM_QUERY_FLASH4SNAP, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_EXPOSURE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_SHARPNESS, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_CONTRAST, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_SATURATION, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_BRIGHTNESS, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_ISO, cam_intf_parm_manual_3a_t, 1);
+ INCLUDE(CAM_INTF_PARM_EXPOSURE_TIME, cam_intf_parm_manual_3a_t, 1);
+ INCLUDE(CAM_INTF_PARM_ZOOM, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_ROLLOFF, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_MODE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_AEC_ALGO_TYPE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_FOCUS_ALGO_TYPE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_AEC_ROI, cam_set_aec_roi_t, 1);
+ INCLUDE(CAM_INTF_PARM_AF_ROI, cam_roi_info_t, 1);
+ INCLUDE(CAM_INTF_PARM_SCE_FACTOR, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_FD, cam_fd_set_parm_t, 1);
+ INCLUDE(CAM_INTF_PARM_MCE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_HFR, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_REDEYE_REDUCTION, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_WAVELET_DENOISE, cam_denoise_param_t, 1);
+ INCLUDE(CAM_INTF_PARM_TEMPORAL_DENOISE, cam_denoise_param_t, 1);
+ INCLUDE(CAM_INTF_PARM_HISTOGRAM, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_ASD_ENABLE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_RECORDING_HINT, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_HDR, cam_exp_bracketing_t, 1);
+ INCLUDE(CAM_INTF_PARM_FRAMESKIP, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_ZSL_MODE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_HDR_NEED_1X, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_LOCK_CAF, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_VIDEO_HDR, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_SENSOR_HDR, cam_sensor_hdr_type_t, 1);
+ INCLUDE(CAM_INTF_PARM_VT, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_SET_AUTOFOCUSTUNING, tune_actuator_t, 1);
+ INCLUDE(CAM_INTF_PARM_SET_VFE_COMMAND, tune_cmd_t, 1);
+ INCLUDE(CAM_INTF_PARM_SET_PP_COMMAND, tune_cmd_t, 1);
+ INCLUDE(CAM_INTF_PARM_MAX_DIMENSION, cam_dimension_t, 1);
+ INCLUDE(CAM_INTF_PARM_RAW_DIMENSION, cam_dimension_t, 1);
+ INCLUDE(CAM_INTF_PARM_TINTLESS, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_WB_MANUAL, cam_manual_wb_parm_t, 1);
+ INCLUDE(CAM_INTF_PARM_CDS_MODE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_EZTUNE_CMD, cam_eztune_cmd_data_t, 1);
+ INCLUDE(CAM_INTF_PARM_INT_EVT, cam_int_evt_params_t, 1);
+ INCLUDE(CAM_INTF_PARM_RDI_MODE, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_BURST_NUM, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_RETRO_BURST_NUM, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_BURST_LED_ON_PERIOD, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_LONGSHOT_ENABLE, int8_t, 1);
+ INCLUDE(CAM_INTF_PARM_TONE_MAP_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_TOUCH_AE_RESULT, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_DUAL_LED_CALIBRATION, int32_t, 1);
+ INCLUDE(CAM_INTF_PARM_ADV_CAPTURE_MODE, uint8_t, 1);
+ INCLUDE(CAM_INTF_PARM_QUADRA_CFA, int32_t, 1);
+
+ /* HAL3 specific */
+ INCLUDE(CAM_INTF_META_STREAM_INFO, cam_stream_size_info_t, 1);
+ INCLUDE(CAM_INTF_META_AEC_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, cam_trigger_t, 1);
+ INCLUDE(CAM_INTF_META_AF_TRIGGER, cam_trigger_t, 1);
+ INCLUDE(CAM_INTF_META_CAPTURE_INTENT, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_DEMOSAIC, int32_t, 1);
+ INCLUDE(CAM_INTF_META_SHARPNESS_STRENGTH, int32_t, 1);
+ INCLUDE(CAM_INTF_META_GEOMETRIC_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_GEOMETRIC_STRENGTH, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_LENS_SHADING_MAP_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_SHADING_STRENGTH, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_TONEMAP_MODE, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_STREAM_ID, cam_stream_ID_t, 1);
+ INCLUDE(CAM_INTF_PARM_STATS_DEBUG_MASK, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_STATS_AF_PAAF, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_FOCUS_BRACKETING, cam_af_bracketing_t, 1);
+ INCLUDE(CAM_INTF_PARM_FLASH_BRACKETING, cam_flash_bracketing_t, 1);
+ INCLUDE(CAM_INTF_META_JPEG_GPS_COORDINATES, double, 3);
+ INCLUDE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, uint8_t, GPS_PROCESSING_METHOD_SIZE);
+ INCLUDE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, int64_t, 1);
+ INCLUDE(CAM_INTF_META_JPEG_ORIENTATION, int32_t, 1);
+ INCLUDE(CAM_INTF_META_JPEG_QUALITY, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_JPEG_THUMB_QUALITY, uint32_t, 1);
+ INCLUDE(CAM_INTF_META_JPEG_THUMB_SIZE, cam_dimension_t, 1);
+ INCLUDE(CAM_INTF_META_TEST_PATTERN_DATA, cam_test_pattern_data_t, 1);
+ INCLUDE(CAM_INTF_META_PROFILE_TONE_CURVE, cam_profile_tone_curve, 1);
+ INCLUDE(CAM_INTF_META_OTP_WB_GRGB, float, 1);
+ INCLUDE(CAM_INTF_META_IMG_HYST_INFO, cam_img_hysterisis_info_t, 1);
+ INCLUDE(CAM_INTF_META_CAC_INFO, cam_cac_info_t, 1);
+ INCLUDE(CAM_INTF_PARM_CAC, cam_aberration_mode_t, 1);
+ INCLUDE(CAM_INTF_META_NEUTRAL_COL_POINT, cam_neutral_col_point_t, 1);
+ INCLUDE(CAM_INTF_PARM_ROTATION, cam_rotation_info_t, 1);
+ INCLUDE(CAM_INTF_PARM_HW_DATA_OVERWRITE, cam_hw_data_overwrite_t, 1);
+ INCLUDE(CAM_INTF_META_IMGLIB, cam_intf_meta_imglib_t, 1);
+ INCLUDE(CAM_INTF_PARM_CAPTURE_FRAME_CONFIG, cam_capture_frame_config_t, 1);
+ INCLUDE(CAM_INTF_PARM_CUSTOM, custom_parm_buffer_t, 1);
+ INCLUDE(CAM_INTF_PARM_FLIP, int32_t, 1);
+ INCLUDE(CAM_INTF_META_USE_AV_TIMER, uint8_t, 1);
+ INCLUDE(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, float, 1);
+ INCLUDE(CAM_INTF_META_LDAF_EXIF, uint32_t, 2);
+ INCLUDE(CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, cam_black_level_metadata_t, 1);
+ INCLUDE(CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, cam_black_level_metadata_t, 1);
+ INCLUDE(CAM_INTF_META_LOW_LIGHT, cam_low_light_mode_t, 1);
+ INCLUDE(CAM_INTF_META_IMG_DYN_FEAT, cam_dyn_img_data_t, 1);
+ INCLUDE(CAM_INTF_PARM_MANUAL_CAPTURE_TYPE, cam_manual_capture_type, 1);
+ INCLUDE(CAM_INTF_AF_STATE_TRANSITION, uint8_t, 1);
+ INCLUDE(CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX, uint32_t, 1);
+ INCLUDE(CAM_INTF_PARM_INSTANT_AEC, uint8_t, 1);
+ INCLUDE(CAM_INTF_META_REPROCESS_FLAGS, uint8_t, 1);
+ INCLUDE(CAM_INTF_PARM_JPEG_ENCODE_CROP, cam_stream_crop_info_t, 1);
+ INCLUDE(CAM_INTF_PARM_JPEG_SCALE_DIMENSION, cam_dimension_t, 1);
+} metadata_data_t;
+
+/* Update clear_metadata_buffer() function when a new is_xxx_valid is added to
+ * or removed from this structure */
+typedef struct {
+ union{
+ /* Hash table of 'is valid' flags */
+ uint8_t is_valid[CAM_INTF_PARM_MAX];
+
+ /* Hash table of 'is required' flags for the GET PARAM */
+ uint8_t is_reqd[CAM_INTF_PARM_MAX];
+ };
+ metadata_data_t data;
+ /*Tuning Data */
+ uint8_t is_tuning_params_valid;
+ tuning_params_t tuning_params;
+
+ /* Mobicat Params */
+ uint8_t is_mobicat_aec_params_valid;
+ cam_3a_params_t mobicat_aec_params;
+
+ /* Stats 3A Debug Params */
+ uint8_t is_statsdebug_ae_params_valid;
+ cam_ae_exif_debug_t statsdebug_ae_data;
+
+ uint8_t is_statsdebug_awb_params_valid;
+ cam_awb_exif_debug_t statsdebug_awb_data;
+
+ uint8_t is_statsdebug_af_params_valid;
+ cam_af_exif_debug_t statsdebug_af_data;
+
+ uint8_t is_statsdebug_asd_params_valid;
+ cam_asd_exif_debug_t statsdebug_asd_data;
+
+ uint8_t is_statsdebug_stats_params_valid;
+ cam_stats_buffer_exif_debug_t statsdebug_stats_buffer_data;
+
+ uint8_t is_statsdebug_bestats_params_valid;
+ cam_bestats_buffer_exif_debug_t statsdebug_bestats_buffer_data;
+
+ uint8_t is_statsdebug_bhist_params_valid;
+ cam_bhist_buffer_exif_debug_t statsdebug_bhist_data;
+
+ uint8_t is_statsdebug_3a_tuning_params_valid;
+ cam_q3a_tuning_info_t statsdebug_3a_tuning_data;
+
+} metadata_buffer_t;
+
+typedef metadata_buffer_t parm_buffer_t;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Update this inline function when a new is_xxx_valid is added to
+ * or removed from metadata_buffer_t */
+static inline void clear_metadata_buffer(metadata_buffer_t *meta)
+{
+ if (meta) {
+ memset(meta->is_valid, 0, CAM_INTF_PARM_MAX);
+ meta->is_tuning_params_valid = 0;
+ meta->is_mobicat_aec_params_valid = 0;
+ meta->is_statsdebug_ae_params_valid = 0;
+ meta->is_statsdebug_awb_params_valid = 0;
+ meta->is_statsdebug_af_params_valid = 0;
+ meta->is_statsdebug_asd_params_valid = 0;
+ meta->is_statsdebug_stats_params_valid = 0;
+ meta->is_statsdebug_bestats_params_valid = 0;
+ meta->is_statsdebug_bhist_params_valid = 0;
+ meta->is_statsdebug_3a_tuning_params_valid = 0;
+ }
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_INTF_H__ */
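[Reference note, not part of the patch] A minimal sketch of how a caller might use the batch macros defined in cam_intf.h above to populate and read back a metadata_buffer_t. CAM_INTF_PARM_ZOOM comes from the INCLUDE table in this header; the LOGE() macro used inside the batch macros is assumed to be supplied by the including module (e.g. via mm_camera_dbg.h).

/* Hedged usage sketch only; assumes LOGE() is defined by the including module. */
#include "cam_intf.h"

static void example_set_and_read_zoom(metadata_buffer_t *meta)
{
    int32_t zoom_level = 2;        /* arbitrary illustration value */
    int32_t zoom_readback = 0;

    clear_metadata_buffer(meta);   /* reset every is_valid flag */

    /* Write one entry and mark it valid (evaluates to 0 on success, -1 on a NULL table). */
    ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ZOOM, zoom_level);

    /* Read it back only if the entry is flagged valid. */
    IF_META_AVAILABLE(int32_t, zoom_ptr, CAM_INTF_PARM_ZOOM, meta) {
        zoom_readback = *zoom_ptr;
    }

    /* Unconditional read of the first element (no is_valid check). */
    READ_PARAM_ENTRY(meta, CAM_INTF_PARM_ZOOM, zoom_readback);
    (void)zoom_readback;
}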
diff --git a/camera/QCamera2/stack/common/cam_list.h b/camera/QCamera2/stack/common/cam_list.h
new file mode 100644
index 0000000..0fbfa07
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_list.h
@@ -0,0 +1,84 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+/* This file is a slave copy from /vendor/qcom/proprietary/mm-camera/common.
+ * Please do not modify it directly here. */
+
+#ifndef __CAMLIST_H
+#define __CAMLIST_H
+
+// System dependency
+#include <stdlib.h>
+
+#define member_of(ptr, type, member) ({ \
+ const typeof(((type *)0)->member) *__mptr = (ptr); \
+ (type *)((char *)__mptr - offsetof(type,member));})
+
+struct cam_list {
+ struct cam_list *next, *prev;
+};
+
+static inline void cam_list_init(struct cam_list *ptr)
+{
+ ptr->next = ptr;
+ ptr->prev = ptr;
+}
+
+static inline void cam_list_add_tail_node(struct cam_list *item,
+ struct cam_list *head)
+{
+ struct cam_list *prev = head->prev;
+
+ head->prev = item;
+ item->next = head;
+ item->prev = prev;
+ prev->next = item;
+}
+
+static inline void cam_list_insert_before_node(struct cam_list *item,
+ struct cam_list *node)
+{
+ item->next = node;
+ item->prev = node->prev;
+ item->prev->next = item;
+ node->prev = item;
+}
+
+static inline void cam_list_del_node(struct cam_list *ptr)
+{
+ struct cam_list *prev = ptr->prev;
+ struct cam_list *next = ptr->next;
+
+ next->prev = ptr->prev;
+ prev->next = ptr->next;
+ ptr->next = ptr;
+ ptr->prev = ptr;
+}
+
+#endif /* __CAMLIST_H */
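[Reference note, not part of the patch] A minimal sketch of the intrusive-list pattern used by cam_list.h: embed struct cam_list in a payload type and recover the payload from a list pointer with member_of(). The type my_frame_t and its frame_id field are hypothetical names used only for illustration.

/* Hedged usage sketch only; my_frame_t / frame_id are illustrative names. */
#include <stddef.h>   /* offsetof(), used by member_of() */
#include <stdint.h>
#include <stdio.h>
#include "cam_list.h"

typedef struct {
    uint32_t frame_id;
    struct cam_list list;    /* intrusive link */
} my_frame_t;

static void example_walk_list(void)
{
    struct cam_list head;
    my_frame_t a = { .frame_id = 1 };
    my_frame_t b = { .frame_id = 2 };

    cam_list_init(&head);
    cam_list_add_tail_node(&a.list, &head);   /* head -> a */
    cam_list_add_tail_node(&b.list, &head);   /* head -> a -> b */

    for (struct cam_list *pos = head.next; pos != &head; pos = pos->next) {
        my_frame_t *frame = member_of(pos, my_frame_t, list);
        printf("frame %u\n", (unsigned)frame->frame_id);
    }

    cam_list_del_node(&a.list);   /* unlink a; a.list points back to itself */
}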
diff --git a/camera/QCamera2/stack/common/cam_queue.h b/camera/QCamera2/stack/common/cam_queue.h
new file mode 100644
index 0000000..fbb5f63
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_queue.h
@@ -0,0 +1,134 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "cam_list.h"
+
+typedef struct {
+ struct cam_list list;
+ void *data;
+} cam_node_t;
+
+typedef struct {
+ cam_node_t head; /* dummy head */
+ uint32_t size;
+ pthread_mutex_t lock;
+} cam_queue_t;
+
+static inline int32_t cam_queue_init(cam_queue_t *queue)
+{
+ pthread_mutex_init(&queue->lock, NULL);
+ cam_list_init(&queue->head.list);
+ queue->size = 0;
+ return 0;
+}
+
+static inline int32_t cam_queue_enq(cam_queue_t *queue, void *data)
+{
+ cam_node_t *node =
+ (cam_node_t *)malloc(sizeof(cam_node_t));
+ if (NULL == node) {
+ return -1;
+ }
+
+ memset(node, 0, sizeof(cam_node_t));
+ node->data = data;
+
+ pthread_mutex_lock(&queue->lock);
+ cam_list_add_tail_node(&node->list, &queue->head.list);
+ queue->size++;
+ pthread_mutex_unlock(&queue->lock);
+
+ return 0;
+}
+
+static inline void *cam_queue_deq(cam_queue_t *queue)
+{
+ cam_node_t *node = NULL;
+ void *data = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ if (pos != head) {
+ node = member_of(pos, cam_node_t, list);
+ cam_list_del_node(&node->list);
+ queue->size--;
+ }
+ pthread_mutex_unlock(&queue->lock);
+
+ if (NULL != node) {
+ data = node->data;
+ free(node);
+ }
+
+ return data;
+}
+
+static inline int32_t cam_queue_flush(cam_queue_t *queue)
+{
+ cam_node_t *node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+
+ while(pos != head) {
+ node = member_of(pos, cam_node_t, list);
+ pos = pos->next;
+ cam_list_del_node(&node->list);
+ queue->size--;
+
+ /* TODO later to consider ptr inside data */
+ /* for now we only assume there is no ptr inside data
+ * so we free data directly */
+ if (NULL != node->data) {
+ free(node->data);
+ }
+ free(node);
+
+ }
+ queue->size = 0;
+ pthread_mutex_unlock(&queue->lock);
+ return 0;
+}
+
+static inline int32_t cam_queue_deinit(cam_queue_t *queue)
+{
+ cam_queue_flush(queue);
+ pthread_mutex_destroy(&queue->lock);
+ return 0;
+}
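[Reference note, not part of the patch] A minimal sketch of the intended enqueue/dequeue pattern for cam_queue_t. Payloads are heap-allocated because cam_queue_flush() (called from cam_queue_deinit()) releases node->data with free(); string.h is included here because cam_queue.h calls memset() without including it itself.

/* Hedged usage sketch only; not part of the imported HAL sources. */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>   /* memset(), used by cam_queue_enq() */
#include "cam_queue.h"

static void example_queue_usage(void)
{
    cam_queue_t q;
    cam_queue_init(&q);

    uint32_t *frame_idx = (uint32_t *)malloc(sizeof(*frame_idx));
    if (frame_idx != NULL) {
        *frame_idx = 42;                       /* arbitrary payload */
        if (cam_queue_enq(&q, frame_idx) != 0) {
            free(frame_idx);                   /* enqueue failed; reclaim payload */
        }
    }

    uint32_t *out = (uint32_t *)cam_queue_deq(&q);  /* NULL when the queue is empty */
    if (out != NULL) {
        free(out);
    }

    cam_queue_deinit(&q);   /* flushes the queue and frees any remaining payloads */
}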
diff --git a/camera/QCamera2/stack/common/cam_semaphore.h b/camera/QCamera2/stack/common/cam_semaphore.h
new file mode 100644
index 0000000..a35634c
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_semaphore.h
@@ -0,0 +1,88 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_SEMAPHORE_H__
+#define __QCAMERA_SEMAPHORE_H__
+
+// System dependencies
+#include <pthread.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Implement the semaphore with a mutex and a condition variable,
+ * because POSIX semaphores on Android are neither widely used nor
+ * well tested.
+ */
+
+typedef struct {
+ int val;
+ pthread_mutex_t mutex;
+ pthread_cond_t cond;
+} cam_semaphore_t;
+
+static inline void cam_sem_init(cam_semaphore_t *s, int n)
+{
+ pthread_mutex_init(&(s->mutex), NULL);
+ pthread_cond_init(&(s->cond), NULL);
+ s->val = n;
+}
+
+static inline void cam_sem_post(cam_semaphore_t *s)
+{
+ pthread_mutex_lock(&(s->mutex));
+ s->val++;
+ pthread_cond_signal(&(s->cond));
+ pthread_mutex_unlock(&(s->mutex));
+}
+
+static inline int cam_sem_wait(cam_semaphore_t *s)
+{
+ int rc = 0;
+ pthread_mutex_lock(&(s->mutex));
+ while (s->val == 0)
+ rc = pthread_cond_wait(&(s->cond), &(s->mutex));
+ s->val--;
+ pthread_mutex_unlock(&(s->mutex));
+ return rc;
+}
+
+static inline void cam_sem_destroy(cam_semaphore_t *s)
+{
+ pthread_mutex_destroy(&(s->mutex));
+ pthread_cond_destroy(&(s->cond));
+ s->val = 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_SEMAPHORE_H__ */
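A small sketch of how this mutex/condvar-backed semaphore can gate a worker thread, in the same spirit as the command threads elsewhere in this stack; the thread body and names are illustrative.

#include <stdio.h>
#include <pthread.h>
#include "cam_semaphore.h"

static cam_semaphore_t cmd_sem;

static void *worker(void *arg)
{
    (void)arg;
    cam_sem_wait(&cmd_sem);             /* block until a token is posted */
    printf("worker: command received\n");
    return NULL;
}

int main(void)
{
    pthread_t tid;
    cam_sem_init(&cmd_sem, 0);          /* start with zero tokens */
    pthread_create(&tid, NULL, worker, NULL);
    cam_sem_post(&cmd_sem);             /* wake the worker exactly once */
    pthread_join(tid, NULL);
    cam_sem_destroy(&cmd_sem);
    return 0;
}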
diff --git a/camera/QCamera2/stack/common/cam_types.h b/camera/QCamera2/stack/common/cam_types.h
new file mode 100644
index 0000000..421a3ba
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_types.h
@@ -0,0 +1,2753 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_TYPES_H__
+#define __QCAMERA_TYPES_H__
+
+// System dependencies
+#include <stdint.h>
+#include <media/msmb_camera.h>
+
+#define CAM_MAX_NUM_BUFS_PER_STREAM 64
+#define MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES 8096
+#define AWB_DEBUG_DATA_SIZE (45000)
+#define AEC_DEBUG_DATA_SIZE (5000)
+#define AF_DEBUG_DATA_SIZE (50000)
+#define ASD_DEBUG_DATA_SIZE (100)
+#define STATS_BUFFER_DEBUG_DATA_SIZE (75000)
+#define BESTATS_BUFFER_DEBUG_DATA_SIZE (150000)
+#define BHIST_STATS_DEBUG_DATA_SIZE (70000)
+#define TUNING_INFO_DEBUG_DATA_SIZE (4)
+
+#define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X) (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X) (((X) + 0x0001) & 0xFFFE)
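These macros round a value up to the next multiple of a power of two by adding (alignment - 1) and masking off the low bits. A quick sanity sketch (values chosen arbitrarily):

#include <assert.h>

static void ceiling_sanity_check(void)
{
    assert(CEILING64(1)   == 64);   /* rounds up to the next multiple of 64 */
    assert(CEILING64(100) == 128);
    assert(CEILING32(33)  == 64);
    assert(CEILING16(17)  == 32);
    assert(CEILING2(5)    == 6);
    assert(CEILING64(64)  == 64);   /* already aligned values are unchanged */
}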
+
+#define MAX_ZOOMS_CNT 91
+#define MAX_SIZES_CNT 40
+#define MAX_EXP_BRACKETING_LENGTH 32
+#define MAX_ROI 10
+#define MAX_STREAM_NUM_IN_BUNDLE 8
+#define MAX_NUM_STREAMS 8
+#define CHROMATIX_SIZE 60000
+#define COMMONCHROMATIX_SIZE 45000
+#define CPPCHROMATIX_SIZE 36000
+#define SWPOSTPROCCHROMATIX_SIZE 36000
+#define AFTUNE_SIZE 32768
+#define A3CHROMATIX_SIZE 30000
+#define MAX_SCALE_SIZES_CNT 8
+#define MAX_SAMP_DECISION_CNT 64
+#define SENSOR_PHYSICAL_SIZE_CNT 2
+#define EXPOSURE_TIME_RANGE_CNT 2
+#define BLACK_LEVEL_PATTERN_CNT 4
+#define FORWARD_MATRIX_COLS 3
+#define FORWARD_MATRIX_ROWS 3
+#define COLOR_TRANSFORM_COLS 3
+#define COLOR_TRANSFORM_ROWS 3
+#define CAL_TRANSFORM_COLS 3
+#define CAL_TRANSFORM_ROWS 3
+
+#define MAX_ISP_DATA_SIZE (20*1024)
+#define MAX_PP_DATA_SIZE 16384
+#define MAX_AE_STATS_DATA_SIZE 1000
+#define MAX_AWB_STATS_DATA_SIZE 1000
+#define MAX_AF_STATS_DATA_SIZE 1000
+#define MAX_ASD_STATS_DATA_SIZE 1000
+
+#define MAX_CAPTURE_BATCH_NUM 32
+
+#define TUNING_DATA_VERSION 6
+#define TUNING_SENSOR_DATA_MAX 0x10000 /*(need value from sensor team)*/
+#define TUNING_VFE_DATA_MAX 0x10000 /*(need value from vfe team)*/
+#define TUNING_CPP_DATA_MAX 0x10000 /*(need value from pproc team)*/
+#define TUNING_CAC_DATA_MAX 0x10000 /*(need value from imglib team)*/
+#define TUNING_DATA_MAX (TUNING_SENSOR_DATA_MAX + \
+ TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX + \
+ TUNING_CAC_DATA_MAX)
+
+#define TUNING_SENSOR_DATA_OFFSET 0
+#define TUNING_VFE_DATA_OFFSET TUNING_SENSOR_DATA_MAX
+#define TUNING_CPP_DATA_OFFSET (TUNING_SENSOR_DATA_MAX + TUNING_VFE_DATA_MAX)
+#define TUNING_CAC_DATA_OFFSET (TUNING_SENSOR_DATA_MAX + \
+ TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX)
+#define MAX_STATS_DATA_SIZE 4000
+
+#define MAX_AF_BRACKETING_VALUES 5
+#define MAX_TEST_PATTERN_CNT 8
+
+#define GPS_PROCESSING_METHOD_SIZE 33
+#define EXIF_IMAGE_DESCRIPTION_SIZE 100
+
+#define MAX_INFLIGHT_REQUESTS 6
+#define MAX_INFLIGHT_BLOB 3
+#define MIN_INFLIGHT_REQUESTS 3
+#define MAX_INFLIGHT_REPROCESS_REQUESTS 1
+#define MAX_INFLIGHT_HFR_REQUESTS (48)
+#define MIN_INFLIGHT_HFR_REQUESTS (40)
+
+#define QCAMERA_DUMP_FRM_LOCATION "/data/misc/camera/"
+#define QCAMERA_MAX_FILEPATH_LENGTH 64
+
+#define LIKELY(x) __builtin_expect((x), true)
+#define UNLIKELY(x) __builtin_expect((x), false)
+
+#define RELCAM_CALIB_ROT_MATRIX_MAX 9
+#define RELCAM_CALIB_SURFACE_PARMS_MAX 32
+#define RELCAM_CALIB_RESERVED_MAX 50
+
+#define MAX_NUM_CAMERA_PER_BUNDLE 2 /* Max number of cameras per bundle */
+#define EXTRA_FRAME_SYNC_BUFFERS 4 /* Extra frame sync buffers in dc mode*/
+#define MM_CAMERA_FRAME_SYNC_NODES EXTRA_FRAME_SYNC_BUFFERS
+
+#define MAX_REPROCESS_STALL 2
+
+#define QCAMERA_MAX_FILEPATH_LENGTH 64
+
+#define MAX_EEPROM_VERSION_INFO_LEN 32
+
+#define MAX_OPTICAL_BLACK_REGIONS 5
+
+/*reprocess pipeline stages are pproc and jpeg */
+#define MAX_REPROCESS_PIPELINE_STAGES 2
+
+/* Defines the number of rows in the color correction matrix (CCM) */
+#define AWB_NUM_CCM_ROWS (3)
+
+/* Defines the number of columns in the color correction matrix (CCM) */
+#define AWB_NUM_CCM_COLS (3)
+
+typedef uint64_t cam_feature_mask_t;
+
+typedef enum {
+ CAM_HAL_V1 = 1,
+ CAM_HAL_V3 = 3
+} cam_hal_version_t;
+
+typedef enum {
+ CAM_STATUS_SUCCESS, /* Operation succeeded */
+ CAM_STATUS_FAILED, /* Failure in doing operation */
+ CAM_STATUS_INVALID_PARM, /* Invalid parameter provided */
+ CAM_STATUS_NOT_SUPPORTED, /* Parameter/operation not supported */
+ CAM_STATUS_ACCEPTED, /* Parameter accepted */
+ CAM_STATUS_MAX,
+} cam_status_t;
+
+typedef enum {
+ /* back main camera */
+ CAM_POSITION_BACK,
+ /* front main camera */
+ CAM_POSITION_FRONT,
+ /* back aux camera */
+ CAM_POSITION_BACK_AUX,
+ /* front aux camera */
+ CAM_POSITION_FRONT_AUX
+} cam_position_t;
+
+typedef enum {
+ CAM_FLICKER_NONE,
+ CAM_FLICKER_50_HZ,
+ CAM_FLICKER_60_HZ
+} cam_flicker_t;
+
+typedef enum {
+ CAM_FORMAT_JPEG = 0,
+ CAM_FORMAT_YUV_420_NV12 = 1,
+ CAM_FORMAT_YUV_420_NV21,
+ CAM_FORMAT_YUV_420_NV21_ADRENO,
+ CAM_FORMAT_YUV_420_YV12,
+ CAM_FORMAT_YUV_422_NV16,
+ CAM_FORMAT_YUV_422_NV61,
+ CAM_FORMAT_YUV_420_NV12_VENUS,
+ /* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
+
+ /* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
+ * U and V are interleaved with Y: YUYV or YVYV */
+ CAM_FORMAT_YUV_RAW_8BIT_YUYV,
+ CAM_FORMAT_YUV_RAW_8BIT_YVYU,
+ CAM_FORMAT_YUV_RAW_8BIT_UYVY, //10
+ CAM_FORMAT_YUV_RAW_8BIT_VYUY,
+
+ /* QCOM RAW formats where data is packed into 64bit word.
+ * 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
+ * stored at LSB.
+ * 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
+ * significant 4 bits are set to 0. P0 is stored at LSB.
+ * 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
+ * significant 4 bits are set to 0. P0 is stored at LSB. */
+ CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,
+ CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,
+ CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,
+ CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,
+ CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,
+ CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,
+ CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,
+ CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,
+ CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG, //20
+ CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,
+ CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,
+ CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,
+ /* MIPI RAW formats based on the MIPI CSI-2 specification.
+ * 8BPP: Each pixel occupies one byte, starting at LSB.
+ * Output width of image has no restrictions.
+ * 10BPP: Four pixels are held in every 5 bytes. The output
+ * width of image must be a multiple of 4 pixels.
+ * 12BPP: Two pixels are held in every 3 bytes. The output
+ * width of image must be a multiple of 2 pixels. */
+ CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,
+ CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,
+ CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,
+ CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,
+ CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,
+ CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,
+ CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB, //30
+ CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,
+ CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,
+ CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,
+ CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,
+ CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,
+ /* Ideal raw formats where image data has gone through black
+ * correction, lens rolloff, demux/channel gain, bad pixel
+ * correction, and ABF.
+ * Ideal raw formats could output any of QCOM_RAW and MIPI_RAW
+ * formats, plus plain8 8bpp, plain16 8bpp, plain16 10bpp, and
+ * plain16 12bpp */
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG, //40
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB, //50
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG, //60
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB, //70
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
+
+ /* generic 8-bit raw */
+ CAM_FORMAT_JPEG_RAW_8BIT,
+ CAM_FORMAT_META_RAW_8BIT,
+
+ /* QCOM RAW formats where data is packed into 64bit word.
+ * 14BPP: 1 64-bit word contains 4 pixels p0 - p3, where most
+ * significant 4 bits are set to 0. P0 is stored at LSB.
+ */
+ CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG,
+ CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG,
+ CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB, //80
+ CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR,
+ /* MIPI RAW formats based on the MIPI CSI-2 specification.
+ * 14 BPP: 1st byte: P0 [13:6]
+ * 2nd byte: P1 [13:6]
+ * 3rd byte: P2 [13:6]
+ * 4th byte: P3 [13:6]
+ * 5th byte: P0 [5:0]
+ * 7th byte: P1 [5:0]
+ * 8th byte: P2 [5:0]
+ * 9th byte: P3 [5:0]
+ */
+ CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG,
+ CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG,
+ CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB,
+ CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG, //90
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR,
+ /* 14BPP: 1st byte: P0 [8:0]
+ * 2nd byte: P0 [13:9]
+ * 3rd byte: P1 [8:0]
+ * 4th byte: P1 [13:9]
+ */
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB,
+ CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR,
+
+ CAM_FORMAT_YUV_444_NV24,
+ CAM_FORMAT_YUV_444_NV42,
+
+ /* Y plane only, used for FD, 8BPP */
+ CAM_FORMAT_Y_ONLY, //100
+
+ /* UBWC format */
+ CAM_FORMAT_YUV_420_NV12_UBWC,
+
+ CAM_FORMAT_YUV_420_NV21_VENUS,
+
+ /* RGB formats */
+ CAM_FORMAT_8888_ARGB,
+
+ /* Y plane only */
+ CAM_FORMAT_Y_ONLY_10_BPP,
+ CAM_FORMAT_Y_ONLY_12_BPP,
+ CAM_FORMAT_Y_ONLY_14_BPP,
+ CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY,
+ CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY,
+ CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY,
+ CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY,
+ CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY,
+ CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY,
+ CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY,
+ CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY,
+ CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY,
+
+ CAM_FORMAT_MAX
+} cam_format_t;
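As a concrete illustration of the packed layouts described above, here is a sketch that unpacks one MIPI RAW10 group into four 16-bit pixels. The byte/bit ordering (bytes 0-3 carry bits [9:2] of pixels 0-3, byte 4 carries the 2-bit LSBs, pixel 0 in bits [1:0] through pixel 3 in bits [7:6]) follows the MIPI CSI-2 convention and is stated here as an assumption, not something the header itself spells out.

#include <stdint.h>

/* Unpack one 5-byte group of CAM_FORMAT_BAYER_MIPI_RAW_10BPP_* data. */
static void unpack_mipi_raw10_group(const uint8_t in[5], uint16_t out[4])
{
    for (int i = 0; i < 4; i++) {
        out[i] = (uint16_t)(((uint16_t)in[i] << 2) |
                            ((in[4] >> (2 * i)) & 0x3));
    }
}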
+
+typedef enum {
+ CAM_STREAM_TYPE_DEFAULT, /* default stream type */
+ CAM_STREAM_TYPE_PREVIEW, /* preview */
+ CAM_STREAM_TYPE_POSTVIEW, /* postview */
+ CAM_STREAM_TYPE_SNAPSHOT, /* snapshot */
+ CAM_STREAM_TYPE_VIDEO, /* video */
+ CAM_STREAM_TYPE_CALLBACK, /* app requested callback */
+ CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */
+ CAM_STREAM_TYPE_METADATA, /* meta data */
+ CAM_STREAM_TYPE_RAW, /* raw dump from camif */
+ CAM_STREAM_TYPE_OFFLINE_PROC, /* offline process */
+ CAM_STREAM_TYPE_PARM, /* mct internal stream */
+ CAM_STREAM_TYPE_ANALYSIS, /* analysis stream */
+ CAM_STREAM_TYPE_MAX,
+} cam_stream_type_t;
+
+typedef enum {
+ CAM_PAD_NONE = 1,
+ CAM_PAD_TO_2 = 2,
+ CAM_PAD_TO_4 = 4,
+ CAM_PAD_TO_WORD = CAM_PAD_TO_4,
+ CAM_PAD_TO_8 = 8,
+ CAM_PAD_TO_16 = 16,
+ CAM_PAD_TO_32 = 32,
+ CAM_PAD_TO_64 = 64,
+ CAM_PAD_TO_128 = 128,
+ CAM_PAD_TO_256 = 256,
+ CAM_PAD_TO_512 = 512,
+ CAM_PAD_TO_1K = 1024,
+ CAM_PAD_TO_2K = 2048,
+ CAM_PAD_TO_4K = 4096,
+ CAM_PAD_TO_8K = 8192
+} cam_pad_format_t;
+
+typedef enum {
+ /* the following are per camera */
+ CAM_MAPPING_BUF_TYPE_CAPABILITY, /* mapping camera capability buffer */
+ CAM_MAPPING_BUF_TYPE_PARM_BUF, /* mapping parameters buffer */
+ /* buffer needed for the payload sent with the related-cameras bundling cmd */
+ CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF, /* mapping sync buffer.*/
+
+ /* the following are per stream */
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, /* mapping stream buffers */
+ CAM_MAPPING_BUF_TYPE_STREAM_INFO, /* mapping stream information buffer */
+ CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, /* mapping offline process input buffer */
+ CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF, /* mapping offline meta buffer */
+ CAM_MAPPING_BUF_TYPE_MISC_BUF, /* mapping offline miscellaneous buffer */
+ CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF, /* mapping user ptr stream buffers */
+ CAM_MAPPING_BUF_TYPE_MAX
+} cam_mapping_buf_type;
+
+typedef enum {
+ CAM_STREAM_BUF_TYPE_MPLANE, /* Multiplanar Buffer type */
+ CAM_STREAM_BUF_TYPE_USERPTR, /* User specific structure pointer*/
+ CAM_STREAM_BUF_TYPE_MAX
+} cam_stream_buf_type;
+
+typedef struct {
+ cam_mapping_buf_type type;
+ uint32_t stream_id; /* stream id: valid if STREAM_BUF */
+ uint32_t frame_idx; /* frame index: valid if type is STREAM_BUF */
+ int32_t plane_idx; /* plane index. valid if type is STREAM_BUF.
+ * -1 means all planes share the same fd;
+ * otherwise, each plane has its own fd */
+ uint32_t cookie; /* could be job_id(uint32_t) to identify mapping job */
+ int32_t fd; /* origin fd */
+ size_t size; /* size of the buffer */
+} cam_buf_map_type;
+
+typedef struct {
+ uint32_t length;
+ cam_buf_map_type buf_maps[CAM_MAX_NUM_BUFS_PER_STREAM];
+} cam_buf_map_type_list;
+
+typedef struct {
+ cam_mapping_buf_type type;
+ uint32_t stream_id; /* stream id: valid if STREAM_BUF */
+ uint32_t frame_idx; /* frame index: valid if STREAM_BUF or HIST_BUF */
+ int32_t plane_idx; /* plane index. valid if type is STREAM_BUF.
+ * -1 means all planes share the same fd;
+ * otherwise, each plane has its own fd */
+ uint32_t cookie; /* could be job_id(uint32_t) to identify unmapping job */
+} cam_buf_unmap_type;
+
+typedef struct {
+ uint32_t length;
+ cam_buf_unmap_type buf_unmaps[CAM_MAX_NUM_BUFS_PER_STREAM];
+} cam_buf_unmap_type_list;
+
+typedef enum {
+ CAM_MAPPING_TYPE_FD_MAPPING,
+ CAM_MAPPING_TYPE_FD_UNMAPPING,
+ CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING,
+ CAM_MAPPING_TYPE_FD_BUNDLED_UNMAPPING,
+ CAM_MAPPING_TYPE_MAX
+} cam_mapping_type;
+
+typedef struct {
+ cam_mapping_type msg_type;
+ union {
+ cam_buf_map_type buf_map;
+ cam_buf_unmap_type buf_unmap;
+ cam_buf_map_type_list buf_map_list;
+ cam_buf_unmap_type_list buf_unmap_list;
+ } payload;
+} cam_sock_packet_t;
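A sketch of how a mapping request for a single stream buffer might be populated before being sent to the backend; the stream id, fd, and size are placeholders supplied by the caller, and the helper name is hypothetical.

#include <string.h>

static void fill_stream_buf_map_packet(cam_sock_packet_t *pkt,
        uint32_t stream_id, uint32_t frame_idx, int32_t fd, size_t size)
{
    memset(pkt, 0, sizeof(*pkt));
    pkt->msg_type                  = CAM_MAPPING_TYPE_FD_MAPPING;
    pkt->payload.buf_map.type      = CAM_MAPPING_BUF_TYPE_STREAM_BUF;
    pkt->payload.buf_map.stream_id = stream_id;
    pkt->payload.buf_map.frame_idx = frame_idx;
    pkt->payload.buf_map.plane_idx = -1;  /* all planes share this fd */
    pkt->payload.buf_map.fd        = fd;
    pkt->payload.buf_map.size      = size;
    pkt->payload.buf_map.cookie    = 0;   /* caller-defined job id */
}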
+
+typedef enum {
+ CAM_MODE_2D = (1<<0),
+ CAM_MODE_3D = (1<<1)
+} cam_mode_t;
+
+typedef struct {
+ uint32_t len;
+ uint32_t y_offset;
+ uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+ uint32_t len;
+ uint32_t offset;
+ int32_t offset_x;
+ int32_t offset_y;
+ int32_t stride;
+ int32_t stride_in_bytes;
+ int32_t scanline;
+ int32_t width; /* width without padding */
+ int32_t height; /* height without padding */
+ int32_t meta_stride; /*Meta stride*/
+ int32_t meta_scanline; /*Meta Scanline*/
+ int32_t meta_len; /*Meta plane length including 4k padding*/
+} cam_mp_len_offset_t;
+
+typedef struct {
+ uint32_t offset_x;
+ uint32_t offset_y;
+} cam_offset_info_t;
+
+typedef struct {
+ uint32_t width_padding;
+ uint32_t height_padding;
+ uint32_t plane_padding;
+ uint32_t min_stride;
+ uint32_t min_scanline;
+ cam_offset_info_t offset_info;
+} cam_padding_info_t;
+
+typedef struct {
+ uint32_t num_planes; /*Number of planes in planar buffer*/
+ union {
+ cam_sp_len_offset_t sp;
+ cam_mp_len_offset_t mp[VIDEO_MAX_PLANES];
+ };
+ uint32_t frame_len;
+} cam_frame_len_offset_t;
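A sketch of how the multi-plane variant could be filled for a two-plane YUV 4:2:0 buffer (NV21-style), padding stride and scanline with the CEILING macros defined earlier; the 64/32 padding choice and the helper itself are illustrative assumptions, not what any particular ISP requires.

#include <string.h>

static void fill_nv21_plane_info(cam_frame_len_offset_t *buf,
        int32_t width, int32_t height)
{
    int32_t stride   = (int32_t)CEILING64((uint32_t)width);
    int32_t scanline = (int32_t)CEILING32((uint32_t)height);

    memset(buf, 0, sizeof(*buf));
    buf->num_planes = 2;

    /* Plane 0: Y, full resolution */
    buf->mp[0].width           = width;
    buf->mp[0].height          = height;
    buf->mp[0].stride          = stride;
    buf->mp[0].stride_in_bytes = stride;
    buf->mp[0].scanline        = scanline;
    buf->mp[0].len             = (uint32_t)(stride * scanline);
    buf->mp[0].offset          = 0;

    /* Plane 1: interleaved CrCb, full width, half height */
    buf->mp[1].width           = width;
    buf->mp[1].height          = height / 2;
    buf->mp[1].stride          = stride;
    buf->mp[1].stride_in_bytes = stride;
    buf->mp[1].scanline        = scanline / 2;
    buf->mp[1].len             = (uint32_t)(stride * (scanline / 2));
    buf->mp[1].offset          = 0;

    buf->frame_len = buf->mp[0].len + buf->mp[1].len;
}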
+
+typedef struct {
+ uint8_t frame_buf_cnt; /*Total plane frames present in 1 batch*/
+ uint32_t size; /*Size of 1 batch buffer. Kernel structure size*/
+ long frameInterval; /*frame interval between each frame*/
+} cam_stream_user_buf_info_t;
+
+typedef struct {
+ int32_t width;
+ int32_t height;
+} cam_dimension_t;
+
+typedef struct {
+ cam_frame_len_offset_t plane_info;
+} cam_stream_buf_plane_info_t;
+
+typedef struct {
+ float min_fps;
+ float max_fps;
+ float video_min_fps;
+ float video_max_fps;
+} cam_fps_range_t;
+
+typedef struct {
+ int32_t min_sensitivity;
+ int32_t max_sensitivity;
+} cam_sensitivity_range_t;
+
+typedef enum {
+ CAM_HFR_MODE_OFF,
+ CAM_HFR_MODE_60FPS,
+ CAM_HFR_MODE_90FPS,
+ CAM_HFR_MODE_120FPS,
+ CAM_HFR_MODE_150FPS,
+ CAM_HFR_MODE_180FPS,
+ CAM_HFR_MODE_210FPS,
+ CAM_HFR_MODE_240FPS,
+ CAM_HFR_MODE_480FPS,
+ CAM_HFR_MODE_MAX
+} cam_hfr_mode_t;
+
+typedef struct {
+ cam_hfr_mode_t mode;
+ uint8_t dim_cnt; /* hfr sizes table count */
+ cam_dimension_t dim[MAX_SIZES_CNT]; /* hfr sizes table */
+ uint8_t livesnapshot_sizes_tbl_cnt; /* livesnapshot sizes table count */
+ cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT]; /* livesnapshot sizes table */
+} cam_hfr_info_t;
+
+typedef enum {
+ CAM_WB_MODE_AUTO,
+ CAM_WB_MODE_CUSTOM,
+ CAM_WB_MODE_INCANDESCENT,
+ CAM_WB_MODE_FLUORESCENT,
+ CAM_WB_MODE_WARM_FLUORESCENT,
+ CAM_WB_MODE_DAYLIGHT,
+ CAM_WB_MODE_CLOUDY_DAYLIGHT,
+ CAM_WB_MODE_TWILIGHT,
+ CAM_WB_MODE_SHADE,
+ CAM_WB_MODE_MANUAL,
+ CAM_WB_MODE_OFF,
+ CAM_WB_MODE_MAX
+} cam_wb_mode_type;
+
+typedef enum {
+ CAM_ANTIBANDING_MODE_OFF,
+ CAM_ANTIBANDING_MODE_60HZ,
+ CAM_ANTIBANDING_MODE_50HZ,
+ CAM_ANTIBANDING_MODE_AUTO,
+ CAM_ANTIBANDING_MODE_AUTO_50HZ,
+ CAM_ANTIBANDING_MODE_AUTO_60HZ,
+ CAM_ANTIBANDING_MODE_MAX,
+} cam_antibanding_mode_type;
+
+/* Enum type for the different ISO modes supported */
+typedef enum {
+ CAM_ISO_MODE_AUTO,
+ CAM_ISO_MODE_DEBLUR,
+ CAM_ISO_MODE_100,
+ CAM_ISO_MODE_200,
+ CAM_ISO_MODE_400,
+ CAM_ISO_MODE_800,
+ CAM_ISO_MODE_1600,
+ CAM_ISO_MODE_3200,
+ CAM_ISO_MODE_MAX
+} cam_iso_mode_type;
+
+typedef enum {
+ CAM_AEC_MODE_FRAME_AVERAGE,
+ CAM_AEC_MODE_CENTER_WEIGHTED,
+ CAM_AEC_MODE_SPOT_METERING,
+ CAM_AEC_MODE_SMART_METERING,
+ CAM_AEC_MODE_USER_METERING,
+ CAM_AEC_MODE_SPOT_METERING_ADV,
+ CAM_AEC_MODE_CENTER_WEIGHTED_ADV,
+ CAM_AEC_MODE_MAX
+} cam_auto_exposure_mode_type;
+
+/* enum to select AEC convergence type */
+typedef enum {
+ /* Normal AEC convergence */
+ CAM_AEC_NORMAL_CONVERGENCE = 0,
+ /* Aggressive AEC convergence */
+ CAM_AEC_AGGRESSIVE_CONVERGENCE,
+ /* Fast AEC convergence */
+ CAM_AEC_FAST_CONVERGENCE,
+ CAM_AEC_CONVERGENCE_MAX
+} cam_aec_convergence_type;
+
+typedef enum {
+ CAM_AE_MODE_OFF,
+ CAM_AE_MODE_ON,
+ CAM_AE_MODE_MAX
+} cam_ae_mode_type;
+
+typedef enum {
+ CAM_FOCUS_ALGO_AUTO,
+ CAM_FOCUS_ALGO_SPOT,
+ CAM_FOCUS_ALGO_CENTER_WEIGHTED,
+ CAM_FOCUS_ALGO_AVERAGE,
+ CAM_FOCUS_ALGO_MAX
+} cam_focus_algorithm_type;
+
+/* Auto focus mode */
+typedef enum {
+ CAM_FOCUS_MODE_OFF,
+ CAM_FOCUS_MODE_AUTO,
+ CAM_FOCUS_MODE_INFINITY,
+ CAM_FOCUS_MODE_MACRO,
+ CAM_FOCUS_MODE_FIXED,
+ CAM_FOCUS_MODE_EDOF,
+ CAM_FOCUS_MODE_CONTINOUS_VIDEO,
+ CAM_FOCUS_MODE_CONTINOUS_PICTURE,
+ CAM_FOCUS_MODE_MANUAL,
+ CAM_FOCUS_MODE_MAX
+} cam_focus_mode_type;
+
+typedef enum {
+ CAM_MANUAL_FOCUS_MODE_INDEX,
+ CAM_MANUAL_FOCUS_MODE_DAC_CODE,
+ CAM_MANUAL_FOCUS_MODE_RATIO,
+ CAM_MANUAL_FOCUS_MODE_DIOPTER,
+ CAM_MANUAL_FOCUS_MODE_MAX
+} cam_manual_focus_mode_type;
+
+typedef struct {
+ cam_manual_focus_mode_type flag;
+ union{
+ int32_t af_manual_lens_position_index;
+ int32_t af_manual_lens_position_dac;
+ int32_t af_manual_lens_position_ratio;
+ float af_manual_diopter;
+ };
+} cam_manual_focus_parm_t;
+
+typedef enum {
+ CAM_MANUAL_WB_MODE_CCT,
+ CAM_MANUAL_WB_MODE_GAIN,
+ CAM_MANUAL_WB_MODE_MAX
+} cam_manual_wb_mode_type;
+
+typedef struct {
+ float r_gain;
+ float g_gain;
+ float b_gain;
+} cam_awb_gain_t;
+
+typedef struct {
+ cam_manual_wb_mode_type type;
+ union{
+ int32_t cct;
+ cam_awb_gain_t gains;
+ };
+} cam_manual_wb_parm_t;
+
+typedef enum {
+ CAM_SCENE_MODE_OFF,
+ CAM_SCENE_MODE_AUTO,
+ CAM_SCENE_MODE_LANDSCAPE,
+ CAM_SCENE_MODE_SNOW,
+ CAM_SCENE_MODE_BEACH,
+ CAM_SCENE_MODE_SUNSET,
+ CAM_SCENE_MODE_NIGHT,
+ CAM_SCENE_MODE_PORTRAIT,
+ CAM_SCENE_MODE_BACKLIGHT,
+ CAM_SCENE_MODE_SPORTS,
+ CAM_SCENE_MODE_ANTISHAKE,
+ CAM_SCENE_MODE_FLOWERS,
+ CAM_SCENE_MODE_CANDLELIGHT,
+ CAM_SCENE_MODE_FIREWORKS,
+ CAM_SCENE_MODE_PARTY,
+ CAM_SCENE_MODE_NIGHT_PORTRAIT,
+ CAM_SCENE_MODE_THEATRE,
+ CAM_SCENE_MODE_ACTION,
+ CAM_SCENE_MODE_AR,
+ CAM_SCENE_MODE_FACE_PRIORITY,
+ CAM_SCENE_MODE_BARCODE,
+ CAM_SCENE_MODE_HDR,
+ CAM_SCENE_MODE_AQUA,
+ CAM_SCENE_MODE_MAX
+} cam_scene_mode_type;
+
+typedef enum {
+ CAM_EFFECT_MODE_OFF,
+ CAM_EFFECT_MODE_MONO,
+ CAM_EFFECT_MODE_NEGATIVE,
+ CAM_EFFECT_MODE_SOLARIZE,
+ CAM_EFFECT_MODE_SEPIA,
+ CAM_EFFECT_MODE_POSTERIZE,
+ CAM_EFFECT_MODE_WHITEBOARD,
+ CAM_EFFECT_MODE_BLACKBOARD,
+ CAM_EFFECT_MODE_AQUA,
+ CAM_EFFECT_MODE_EMBOSS,
+ CAM_EFFECT_MODE_SKETCH,
+ CAM_EFFECT_MODE_NEON,
+ CAM_EFFECT_MODE_BEAUTY,
+ CAM_EFFECT_MODE_MAX
+} cam_effect_mode_type;
+
+typedef enum {
+ CAM_FLASH_MODE_OFF,
+ CAM_FLASH_MODE_AUTO,
+ CAM_FLASH_MODE_ON,
+ CAM_FLASH_MODE_TORCH,
+ CAM_FLASH_MODE_SINGLE,
+ CAM_FLASH_MODE_MAX
+} cam_flash_mode_t;
+
+// Flash States
+typedef enum {
+ CAM_FLASH_STATE_UNAVAILABLE,
+ CAM_FLASH_STATE_CHARGING,
+ CAM_FLASH_STATE_READY,
+ CAM_FLASH_STATE_FIRED,
+ CAM_FLASH_STATE_PARTIAL,
+ CAM_FLASH_STATE_MAX
+} cam_flash_state_t;
+
+typedef enum {
+ CAM_FLASH_FIRING_LEVEL_0,
+ CAM_FLASH_FIRING_LEVEL_1,
+ CAM_FLASH_FIRING_LEVEL_2,
+ CAM_FLASH_FIRING_LEVEL_3,
+ CAM_FLASH_FIRING_LEVEL_4,
+ CAM_FLASH_FIRING_LEVEL_5,
+ CAM_FLASH_FIRING_LEVEL_6,
+ CAM_FLASH_FIRING_LEVEL_7,
+ CAM_FLASH_FIRING_LEVEL_8,
+ CAM_FLASH_FIRING_LEVEL_9,
+ CAM_FLASH_FIRING_LEVEL_10,
+ CAM_FLASH_FIRING_LEVEL_MAX
+} cam_flash_firing_level_t;
+
+
+typedef enum {
+ CAM_AEC_TRIGGER_IDLE,
+ CAM_AEC_TRIGGER_START
+} cam_aec_trigger_type_t;
+
+typedef enum {
+ CAM_AF_TRIGGER_IDLE,
+ CAM_AF_TRIGGER_START,
+ CAM_AF_TRIGGER_CANCEL
+} cam_af_trigger_type_t;
+
+typedef enum {
+ CAM_AE_STATE_INACTIVE,
+ CAM_AE_STATE_SEARCHING,
+ CAM_AE_STATE_CONVERGED,
+ CAM_AE_STATE_LOCKED,
+ CAM_AE_STATE_FLASH_REQUIRED,
+ CAM_AE_STATE_PRECAPTURE
+} cam_ae_state_t;
+
+typedef enum {
+ CAM_NOISE_REDUCTION_MODE_OFF,
+ CAM_NOISE_REDUCTION_MODE_FAST,
+ CAM_NOISE_REDUCTION_MODE_HIGH_QUALITY,
+ CAM_NOISE_REDUCTION_MODE_MINIMAL,
+ CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
+} cam_noise_reduction_mode_t;
+
+typedef enum {
+ CAM_EDGE_MODE_OFF,
+ CAM_EDGE_MODE_FAST,
+ CAM_EDGE_MODE_HIGH_QUALITY,
+ CAM_EDGE_MODE_ZERO_SHUTTER_LAG,
+} cam_edge_mode_t;
+
+typedef struct {
+ uint8_t edge_mode;
+ int32_t sharpness;
+} cam_edge_application_t;
+
+typedef enum {
+ CAM_BLACK_LEVEL_LOCK_OFF,
+ CAM_BLACK_LEVEL_LOCK_ON,
+} cam_black_level_lock_t;
+
+typedef enum {
+ CAM_HOTPIXEL_MODE_OFF,
+ CAM_HOTPIXEL_MODE_FAST,
+ CAM_HOTPIXEL_MODE_HIGH_QUALITY,
+} cam_hotpixel_mode_t;
+
+typedef enum {
+ CAM_LENS_SHADING_MAP_MODE_OFF,
+ CAM_LENS_SHADING_MAP_MODE_ON,
+} cam_lens_shading_map_mode_t;
+
+typedef enum {
+ CAM_LENS_SHADING_MODE_OFF,
+ CAM_LENS_SHADING_MODE_FAST,
+ CAM_LENS_SHADING_MODE_HIGH_QUALITY,
+} cam_lens_shading_mode_t;
+
+typedef enum {
+ CAM_FACE_DETECT_MODE_OFF,
+ CAM_FACE_DETECT_MODE_SIMPLE,
+ CAM_FACE_DETECT_MODE_FULL,
+} cam_face_detect_mode_t;
+
+typedef enum {
+ CAM_TONEMAP_MODE_CONTRAST_CURVE,
+ CAM_TONEMAP_MODE_FAST,
+ CAM_TONEMAP_MODE_HIGH_QUALITY,
+} cam_tonemap_mode_t;
+
+typedef enum {
+ CAM_CDS_MODE_OFF,
+ CAM_CDS_MODE_ON,
+ CAM_CDS_MODE_AUTO,
+ CAM_CDS_MODE_LOCK,
+ CAM_CDS_MODE_MAX
+} cam_cds_mode_type_t;
+
+typedef enum {
+ CAM_SENSOR_HDR_OFF,
+ CAM_SENSOR_HDR_IN_SENSOR = 1,
+ CAM_SENSOR_HDR_ZIGZAG,
+ CAM_SENSOR_HDR_MAX,
+} cam_sensor_hdr_type_t;
+
+typedef struct {
+ int32_t left;
+ int32_t top;
+ int32_t width;
+ int32_t height;
+} cam_rect_t;
+
+typedef struct {
+ cam_rect_t rect;
+ int32_t weight; /* weight of the area, valid for focusing/metering areas */
+} cam_area_t;
+
+typedef enum {
+ CAM_STREAMING_MODE_CONTINUOUS, /* continuous streaming */
+ CAM_STREAMING_MODE_BURST, /* burst streaming */
+ CAM_STREAMING_MODE_BATCH, /* stream frames in batches */
+ CAM_STREAMING_MODE_MAX
+} cam_streaming_mode_t;
+
+typedef enum {
+ IS_TYPE_NONE,
+ IS_TYPE_DIS,
+ IS_TYPE_GA_DIS,
+ IS_TYPE_EIS_1_0,
+ IS_TYPE_EIS_2_0,
+ IS_TYPE_EIS_3_0,
+ IS_TYPE_MAX
+} cam_is_type_t;
+
+typedef enum {
+ DIS_DISABLE,
+ DIS_ENABLE
+} cam_dis_mode_t;
+
+typedef enum {
+ NON_SECURE,
+ SECURE
+} cam_stream_secure_t;
+
+#define CAM_REPROCESS_MASK_TYPE_WNR (1<<0)
+
+/* event from server */
+typedef enum {
+ CAM_EVENT_TYPE_MAP_UNMAP_DONE = (1<<0),
+ CAM_EVENT_TYPE_AUTO_FOCUS_DONE = (1<<1),
+ CAM_EVENT_TYPE_ZOOM_DONE = (1<<2),
+ CAM_EVENT_TYPE_DAEMON_DIED = (1<<3),
+ CAM_EVENT_TYPE_INT_TAKE_JPEG = (1<<4),
+ CAM_EVENT_TYPE_INT_TAKE_RAW = (1<<5),
+ CAM_EVENT_TYPE_DAEMON_PULL_REQ = (1<<6),
+ CAM_EVENT_TYPE_CAC_DONE = (1<<7),
+ CAM_EVENT_TYPE_MAX
+} cam_event_type_t;
+
+typedef enum {
+ CAM_EXP_BRACKETING_OFF,
+ CAM_EXP_BRACKETING_ON
+} cam_bracket_mode;
+
+typedef enum {
+ CAM_LOW_LIGHT_OFF = 0,
+ CAM_LOW_LIGHT_ON,
+} cam_low_light_mode_t;
+
+typedef struct {
+ cam_bracket_mode mode;
+ char values[MAX_EXP_BRACKETING_LENGTH]; /* user defined values */
+} cam_exp_bracketing_t;
+
+typedef struct {
+ uint32_t num_frames;
+ cam_exp_bracketing_t exp_val;
+} cam_hdr_bracketing_info_t;
+
+typedef struct {
+ cam_bracket_mode mode;
+ int32_t values; /* user defined values */
+} cam_capture_bracketing_t;
+
+typedef enum {
+ CAM_SETTINGS_TYPE_OFF,
+ CAM_SETTINGS_TYPE_ON,
+ CAM_SETTINGS_TYPE_AUTO
+} cam_manual_setting_mode;
+
+typedef struct {
+ cam_manual_setting_mode exp_mode;
+ int64_t exp_time;
+ cam_manual_setting_mode iso_mode;
+ int32_t iso_value;
+} cam_capture_manual_3A_t;
+
+typedef enum {
+ CAM_CAPTURE_NORMAL,
+ CAM_CAPTURE_FLASH,
+ CAM_CAPTURE_BRACKETING,
+ CAM_CAPTURE_LOW_LIGHT,
+ CAM_CAPTURE_RESET,
+ CAM_CAPTURE_MANUAL_3A,
+ CAM_CAPTURE_MAX
+} cam_capture_type;
+
+typedef struct {
+ int32_t num_frames; /*Num of frames requested on this quality*/
+ cam_capture_type type; /*type of the capture request*/
+
+ /* union to store values of the capture type */
+ union {
+ cam_flash_mode_t flash_mode;
+ cam_capture_bracketing_t hdr_mode;
+ cam_low_light_mode_t low_light_mode;
+ cam_capture_manual_3A_t manual_3A_mode;
+ };
+} cam_capture_settings_t;
+
+typedef struct {
+ uint32_t num_batch; /*Number of frames batch requested*/
+ cam_capture_settings_t configs[MAX_CAPTURE_BATCH_NUM];
+} cam_capture_frame_config_t;
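A sketch of how a two-entry frame batch could be described with these structures, for example one flash-assisted frame followed by two normal frames; the counts and the helper name are arbitrary.

#include <string.h>

static void build_flash_then_normal_batch(cam_capture_frame_config_t *cfg)
{
    memset(cfg, 0, sizeof(*cfg));
    cfg->num_batch = 2;

    /* Batch 0: one frame captured with the flash fired. */
    cfg->configs[0].num_frames = 1;
    cfg->configs[0].type       = CAM_CAPTURE_FLASH;
    cfg->configs[0].flash_mode = CAM_FLASH_MODE_ON;

    /* Batch 1: two normal frames with no special handling. */
    cfg->configs[1].num_frames = 2;
    cfg->configs[1].type       = CAM_CAPTURE_NORMAL;
}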
+
+typedef struct {
+ uint8_t stepsize;
+ uint8_t direction;
+ int32_t num_steps;
+ uint8_t ttype;
+} tune_actuator_t;
+
+typedef struct {
+ uint8_t module;
+ uint8_t type;
+ int32_t value;
+} tune_cmd_t;
+
+typedef enum {
+ CAM_AEC_ROI_OFF,
+ CAM_AEC_ROI_ON
+} cam_aec_roi_ctrl_t;
+
+typedef enum {
+ CAM_AEC_ROI_BY_INDEX,
+ CAM_AEC_ROI_BY_COORDINATE,
+} cam_aec_roi_type_t;
+
+typedef struct {
+ uint32_t x;
+ uint32_t y;
+} cam_coordinate_type_t;
+
+typedef struct {
+ int32_t numerator;
+ int32_t denominator;
+} cam_rational_type_t;
+
+typedef struct {
+ cam_aec_roi_ctrl_t aec_roi_enable;
+ cam_aec_roi_type_t aec_roi_type;
+ union {
+ cam_coordinate_type_t coordinate[MAX_ROI];
+ uint32_t aec_roi_idx[MAX_ROI];
+ } cam_aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+ uint32_t frm_id;
+ uint8_t num_roi;
+ cam_rect_t roi[MAX_ROI];
+ int32_t weight[MAX_ROI];
+ uint8_t is_multiwindow;
+} cam_roi_info_t;
+
+typedef enum {
+ CAM_WAVELET_DENOISE_YCBCR_PLANE,
+ CAM_WAVELET_DENOISE_CBCR_ONLY,
+ CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
+ CAM_WAVELET_DENOISE_STREAMLINED_CBCR
+} cam_denoise_process_type_t;
+
+typedef struct {
+ uint8_t denoise_enable;
+ cam_denoise_process_type_t process_plates;
+} cam_denoise_param_t;
+
+#define CAM_FACE_PROCESS_MASK_DETECTION (1U<<0)
+#define CAM_FACE_PROCESS_MASK_RECOGNITION (1U<<1)
+#define CAM_FACE_PROCESS_MASK_FOCUS (1U<<2)
+#define CAM_FACE_PROCESS_MASK_BLINK (1U<<3)
+#define CAM_FACE_PROCESS_MASK_SMILE (1U<<4)
+#define CAM_FACE_PROCESS_MASK_GAZE (1U<<5)
+
+typedef struct {
+ uint32_t fd_mode; /* mask of face process */
+ uint32_t num_fd;
+} cam_fd_set_parm_t;
+
+typedef enum {
+ QCAMERA_FD_PREVIEW,
+ QCAMERA_FD_SNAPSHOT
+}qcamera_face_detect_type_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_EYE_L_PUPIL,
+ CAM_FACE_CT_POINT_EYE_L_IN,
+ CAM_FACE_CT_POINT_EYE_L_OUT,
+ CAM_FACE_CT_POINT_EYE_L_UP,
+ CAM_FACE_CT_POINT_EYE_L_DOWN,
+ CAM_FACE_CT_POINT_EYE_R_PUPIL,
+ CAM_FACE_CT_POINT_EYE_R_IN,
+ CAM_FACE_CT_POINT_EYE_R_OUT,
+ CAM_FACE_CT_POINT_EYE_R_UP,
+ CAM_FACE_CT_POINT_EYE_R_DOWN,
+ CAM_FACE_CT_POINT_EYE_MAX
+} cam_face_ct_point_eye_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_FOREHEAD,
+ CAM_FACE_CT_POINT_FOREHEAD_MAX
+} cam_face_ct_point_forh_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_NOSE,
+ CAM_FACE_CT_POINT_NOSE_TIP,
+ CAM_FACE_CT_POINT_NOSE_L,
+ CAM_FACE_CT_POINT_NOSE_R,
+ CAM_FACE_CT_POINT_NOSE_L_0,
+ CAM_FACE_CT_POINT_NOSE_R_0,
+ CAM_FACE_CT_POINT_NOSE_L_1,
+ CAM_FACE_CT_POINT_NOSE_R_1,
+ CAM_FACE_CT_POINT_NOSE_MAX
+} cam_face_ct_point_nose_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_MOUTH_L,
+ CAM_FACE_CT_POINT_MOUTH_R,
+ CAM_FACE_CT_POINT_MOUTH_UP,
+ CAM_FACE_CT_POINT_MOUTH_DOWN,
+ CAM_FACE_CT_POINT_MOUTH_MAX
+} cam_face_ct_point_mouth_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_LIP_UP,
+ CAM_FACE_CT_POINT_LIP_DOWN,
+ CAM_FACE_CT_POINT_LIP_MAX
+} cam_face_ct_point_lip_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_BROW_L_UP,
+ CAM_FACE_CT_POINT_BROW_L_DOWN,
+ CAM_FACE_CT_POINT_BROW_L_IN,
+ CAM_FACE_CT_POINT_BROW_L_OUT,
+ CAM_FACE_CT_POINT_BROW_R_UP,
+ CAM_FACE_CT_POINT_BROW_R_DOWN,
+ CAM_FACE_CT_POINT_BROW_R_IN,
+ CAM_FACE_CT_POINT_BROW_R_OUT,
+ CAM_FACE_CT_POINT_BROW_MAX
+} cam_face_ct_point_brow_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_CHIN,
+ CAM_FACE_CT_POINT_CHIN_L,
+ CAM_FACE_CT_POINT_CHIN_R,
+ CAM_FACE_CT_POINT_CHIN_MAX
+} cam_face_ct_point_chin_t;
+
+typedef enum {
+ CAM_FACE_CT_POINT_EAR_L_DOWN,
+ CAM_FACE_CT_POINT_EAR_R_DOWN,
+ CAM_FACE_CT_POINT_EAR_L_UP,
+ CAM_FACE_CT_POINT_EAR_R_UP,
+ CAM_FACE_CT_POINT_EAR_MAX
+} cam_face_ct_point_ear_t;
+
+typedef struct {
+ uint8_t is_eye_valid;
+ cam_coordinate_type_t contour_eye_pt[CAM_FACE_CT_POINT_EYE_MAX];
+ uint8_t is_forehead_valid;
+ cam_coordinate_type_t contour_forh_pt[CAM_FACE_CT_POINT_FOREHEAD_MAX];
+ uint8_t is_nose_valid;
+ cam_coordinate_type_t contour_nose_pt[CAM_FACE_CT_POINT_NOSE_MAX];
+ uint8_t is_mouth_valid;
+ cam_coordinate_type_t contour_mouth_pt[CAM_FACE_CT_POINT_MOUTH_MAX];
+ uint8_t is_lip_valid;
+ cam_coordinate_type_t contour_lip_pt[CAM_FACE_CT_POINT_LIP_MAX];
+ uint8_t is_brow_valid;
+ cam_coordinate_type_t contour_brow_pt[CAM_FACE_CT_POINT_BROW_MAX];
+ uint8_t is_chin_valid;
+ cam_coordinate_type_t contour_chin_pt[CAM_FACE_CT_POINT_CHIN_MAX];
+ uint8_t is_ear_valid;
+ cam_coordinate_type_t contour_ear_pt[CAM_FACE_CT_POINT_EAR_MAX];
+} cam_face_contour_info_t;
+
+typedef struct {
+ cam_face_contour_info_t face_contour[MAX_ROI];
+} cam_face_contour_data_t;
+
+typedef struct {
+ cam_coordinate_type_t left_eye_center; /* coordinate of center of left eye */
+ cam_coordinate_type_t right_eye_center; /* coordinate of center of right eye */
+ cam_coordinate_type_t mouth_center; /* coordinate of center of mouth */
+} cam_face_landmarks_info_t;
+
+typedef struct {
+ cam_face_landmarks_info_t face_landmarks[MAX_ROI];
+} cam_face_landmarks_data_t;
+
+typedef struct {
+ uint8_t smile_degree; /* smile degree (0, -100) */
+ uint8_t smile_confidence; /* smile confidence (0, 100) */
+} cam_face_smile_info_t;
+
+typedef struct {
+ cam_face_smile_info_t smile[MAX_ROI];
+} cam_face_smile_data_t;
+
+typedef struct {
+ int8_t gaze_angle; /* -90 -45 0 45 90 for head left to right tilt */
+ int32_t updown_dir; /* up down direction (-180, 179) */
+ int32_t leftright_dir; /* left right direction (-180, 179) */
+ int32_t roll_dir; /* roll direction (-180, 179) */
+ int8_t left_right_gaze; /* left right gaze degree (-50, 50) */
+ int8_t top_bottom_gaze; /* up down gaze degree (-50, 50) */
+} cam_face_gaze_info_t;
+
+typedef struct {
+ cam_face_gaze_info_t gaze[MAX_ROI];
+} cam_face_gaze_data_t;
+
+typedef struct {
+ uint8_t blink_detected; /* if blink is detected */
+ uint8_t left_blink; /* left eye blink degree (0, -100) */
+ uint8_t right_blink; /* right eye blink degree (0, -100) */
+} cam_face_blink_info_t;
+
+typedef struct {
+ cam_face_blink_info_t blink[MAX_ROI];
+} cam_face_blink_data_t;
+
+typedef struct {
+ uint8_t face_recognised; /* if face is recognised */
+ uint32_t unique_id; /* if face is recognised */
+} cam_face_recog_info_t;
+
+typedef struct {
+ cam_face_recog_info_t face_rec[MAX_ROI];
+} cam_face_recog_data_t;
+
+typedef struct {
+ int32_t face_id; /* unique id for face tracking within view unless view changes */
+ int8_t score; /* score of confidence (0, -100) */
+ cam_rect_t face_boundary; /* boundary of face detected */
+} cam_face_detection_info_t;
+
+typedef struct {
+ uint32_t frame_id; /* frame index of which faces are detected */
+ uint8_t num_faces_detected; /* number of faces detected */
+ cam_face_detection_info_t faces[MAX_ROI]; /* detailed information of faces detected */
+ qcamera_face_detect_type_t fd_type; /* face detect for preview or snapshot frame*/
+ cam_dimension_t fd_frame_dim; /* frame dims on which fd is applied */
+ uint8_t update_flag; /* flag to inform whether HAL needs to send cb
+ * to app or not */
+} cam_face_detection_data_t;
+
+// definition of composite face detection data
+typedef struct {
+ cam_face_detection_data_t detection_data;
+
+ int8_t recog_valid;
+ cam_face_recog_data_t recog_data;
+
+ int8_t blink_valid;
+ cam_face_blink_data_t blink_data;
+
+ int8_t gaze_valid;
+ cam_face_gaze_data_t gaze_data;
+
+ int8_t smile_valid;
+ cam_face_smile_data_t smile_data;
+
+ int8_t landmark_valid;
+ cam_face_landmarks_data_t landmark_data;
+
+ int8_t contour_valid;
+ cam_face_contour_data_t contour_data;
+} cam_faces_data_t;
+
+#define CAM_HISTOGRAM_STATS_SIZE 256
+typedef struct {
+ uint32_t max_hist_value;
+ uint32_t hist_buf[CAM_HISTOGRAM_STATS_SIZE]; /* buf holding histogram stats data */
+} cam_histogram_data_t;
+
+typedef struct {
+ cam_histogram_data_t r_stats;
+ cam_histogram_data_t b_stats;
+ cam_histogram_data_t gr_stats;
+ cam_histogram_data_t gb_stats;
+} cam_bayer_hist_stats_t;
+
+typedef enum {
+ CAM_HISTOGRAM_TYPE_BAYER,
+ CAM_HISTOGRAM_TYPE_YUV
+} cam_histogram_type_t;
+
+typedef struct {
+ cam_histogram_type_t type;
+ union {
+ cam_bayer_hist_stats_t bayer_stats;
+ cam_histogram_data_t yuv_stats;
+ };
+} cam_hist_stats_t;
+
+enum cam_focus_distance_index{
+ CAM_FOCUS_DISTANCE_NEAR_INDEX, /* 0 */
+ CAM_FOCUS_DISTANCE_OPTIMAL_INDEX,
+ CAM_FOCUS_DISTANCE_FAR_INDEX,
+ CAM_FOCUS_DISTANCE_MAX_INDEX
+};
+
+typedef struct {
+ float focus_distance[CAM_FOCUS_DISTANCE_MAX_INDEX];
+} cam_focus_distances_info_t;
+
+typedef struct {
+ uint32_t scale;
+ float diopter;
+} cam_focus_pos_info_t ;
+
+typedef struct {
+ float focalLengthRatio;
+} cam_focal_length_ratio_t;
+
+typedef struct {
+ uint8_t needFlush;
+ uint32_t focused_frame_idx;
+} cam_af_flush_info_t;
+
+/* Different autofocus cycles when calling do_autoFocus
+ * CAM_AF_COMPLETE_EXISTING_SWEEP: Complete existing sweep
+ * if one is ongoing, and lock.
+ * CAM_AF_DO_ONE_FULL_SWEEP: Do one full sweep, regardless
+ * of the current state, and lock.
+ * CAM_AF_START_CONTINUOUS_SWEEP: Start continuous sweep.
+ * After do_autoFocus, HAL receives an event: CAM_AF_FOCUSED,
+ * or CAM_AF_NOT_FOCUSED.
+ * cancel_autoFocus stops any lens movement.
+ * Each do_autoFocus call only produces 1 FOCUSED/NOT_FOCUSED
+ * event, not both.
+ */
+typedef enum {
+ CAM_AF_COMPLETE_EXISTING_SWEEP,
+ CAM_AF_DO_ONE_FULL_SWEEP,
+ CAM_AF_START_CONTINUOUS_SWEEP
+} cam_autofocus_cycle_t;
+
+typedef enum {
+ CAM_AF_SCANNING,
+ CAM_AF_FOCUSED,
+ CAM_AF_NOT_FOCUSED,
+ CAM_CAF_SCANNING,
+ CAM_CAF_FOCUSED,
+ CAM_CAF_NOT_FOCUSED,
+ CAM_AF_INACTIVE
+} cam_autofocus_state_t;
+
+//Don't change the order of the AF states below. It should match
+//with the corresponding enum in frameworks (camera3.h and
+//CameraMetadata.java)
+typedef enum {
+ CAM_AF_STATE_INACTIVE,
+ CAM_AF_STATE_PASSIVE_SCAN,
+ CAM_AF_STATE_PASSIVE_FOCUSED,
+ CAM_AF_STATE_ACTIVE_SCAN,
+ CAM_AF_STATE_FOCUSED_LOCKED,
+ CAM_AF_STATE_NOT_FOCUSED_LOCKED,
+ CAM_AF_STATE_PASSIVE_UNFOCUSED
+} cam_af_state_t;
+
+typedef struct {
+ cam_af_state_t focus_state; /* state of focus */
+ cam_focus_distances_info_t focus_dist; /* focus distance */
+ cam_focus_mode_type focus_mode; /* focus mode from backend */
+ int32_t focus_pos;
+ cam_af_flush_info_t flush_info;
+} cam_auto_focus_data_t;
+
+typedef struct {
+ uint32_t stream_id;
+ cam_rect_t crop;
+ cam_rect_t roi_map;
+} cam_stream_crop_info_t;
+
+typedef struct {
+ uint8_t num_of_streams;
+ cam_stream_crop_info_t crop_info[MAX_NUM_STREAMS];
+} cam_crop_data_t;
+
+typedef struct {
+ uint32_t stream_id;
+ uint32_t cds_enable;
+} cam_stream_cds_info_t;
+
+typedef struct {
+ uint8_t session_cds_enable;
+ uint8_t num_of_streams;
+ cam_stream_cds_info_t cds_info[MAX_NUM_STREAMS];
+} cam_cds_data_t;
+
+typedef enum {
+ DO_NOT_NEED_FUTURE_FRAME,
+ NEED_FUTURE_FRAME,
+} cam_prep_snapshot_state_t;
+
+#define CC_GAINS_COUNT 4
+
+typedef struct {
+ float gains[CC_GAINS_COUNT];
+} cam_color_correct_gains_t;
+
+typedef struct {
+ // If LED is ON and Burst Num > 1, this is first LED ON frame
+ uint32_t min_frame_idx;
+ // If LED is ON and Burst Num > 1, this is first LED Off frame after ON
+ uint32_t max_frame_idx;
+ // Used only when LED Is ON and burst num > 1
+ uint32_t num_led_on_frames;
+ // Skip count after LED is turned OFF
+ uint32_t frame_skip_count;
+ // Batch id for each picture request
+ uint32_t config_batch_idx;
+} cam_frame_idx_range_t;
+
+typedef enum {
+ S_NORMAL = 0,
+ S_SCENERY,
+ S_PORTRAIT,
+ S_PORTRAIT_BACKLIGHT,
+ S_SCENERY_BACKLIGHT,
+ S_BACKLIGHT,
+ S_HDR,
+ S_MAX_DEFAULT,
+ S_CUSTOM0 = S_MAX_DEFAULT,
+ S_CUSTOM1,
+ S_CUSTOM2,
+ S_CUSTOM3,
+ S_CUSTOM4,
+ S_CUSTOM5,
+ S_CUSTOM6,
+ S_CUSTOM7,
+ S_CUSTOM8,
+ S_CUSTOM9,
+ S_MAX,
+} cam_auto_scene_t;
+
+typedef struct {
+ uint32_t is_hdr_scene;
+ float hdr_confidence;
+} cam_asd_hdr_scene_data_t;
+
+typedef struct {
+ uint32_t detected;
+ float confidence;
+ uint32_t auto_compensation;
+} cam_asd_scene_info_t;
+
+typedef struct {
+ cam_auto_scene_t detected_scene;
+ uint8_t max_n_scenes;
+ cam_asd_scene_info_t scene_info[S_MAX];
+} cam_asd_decision_t;
+
+
+typedef struct {
+ uint32_t meta_frame_id;
+} cam_meta_valid_t;
+
+typedef enum {
+ CAM_SENSOR_RAW,
+ CAM_SENSOR_YUV,
+ CAM_SENSOR_MONO
+} cam_sensor_t;
+
+typedef struct {
+ cam_flash_mode_t flash_mode;
+ float aperture_value;
+ cam_flash_state_t flash_state;
+ float focal_length;
+ float f_number;
+ int32_t sensing_method;
+ float crop_factor;
+ cam_sensor_t sens_type;
+} cam_sensor_params_t;
+
+typedef enum {
+ CAM_METERING_MODE_UNKNOWN = 0,
+ CAM_METERING_MODE_AVERAGE = 1,
+ CAM_METERING_MODE_CENTER_WEIGHTED_AVERAGE = 2,
+ CAM_METERING_MODE_SPOT = 3,
+ CAM_METERING_MODE_MULTI_SPOT = 4,
+ CAM_METERING_MODE_PATTERN = 5,
+ CAM_METERING_MODE_PARTIAL = 6,
+ CAM_METERING_MODE_OTHER = 255,
+} cam_metering_mode_t;
+
+typedef struct {
+ float exp_time;
+ int32_t iso_value;
+ uint32_t flash_needed;
+ uint32_t settled;
+ cam_wb_mode_type wb_mode;
+ uint32_t metering_mode;
+ uint32_t exposure_program;
+ uint32_t exposure_mode;
+ uint32_t scenetype;
+ float brightness;
+ float est_snap_exp_time;
+ int32_t est_snap_iso_value;
+ uint32_t est_snap_luma;
+ uint32_t est_snap_target;
+} cam_3a_params_t;
+
+typedef struct {
+ uint64_t sw_version_number;
+ int32_t aec_debug_data_size;
+ char aec_private_debug_data[AEC_DEBUG_DATA_SIZE];
+} cam_ae_exif_debug_t;
+
+typedef struct {
+ int8_t awb_ccm_enable;
+ int8_t hard_awb_ccm_flag;
+ int8_t ccm_update_flag;
+ float ccm[AWB_NUM_CCM_ROWS][AWB_NUM_CCM_COLS];
+ float ccm_offset[AWB_NUM_CCM_ROWS];
+} cam_awb_ccm_update_t;
+
+typedef struct {
+ int32_t cct_value;
+ cam_awb_gain_t rgb_gains;
+ cam_awb_ccm_update_t ccm_update;
+} cam_awb_params_t;
+
+typedef struct {
+ int32_t awb_debug_data_size;
+ char awb_private_debug_data[AWB_DEBUG_DATA_SIZE];
+} cam_awb_exif_debug_t;
+
+/* AF debug data for exif*/
+typedef struct {
+ int32_t af_debug_data_size;
+ char af_private_debug_data[AF_DEBUG_DATA_SIZE];
+} cam_af_exif_debug_t;
+
+typedef struct {
+ int32_t asd_debug_data_size;
+ char asd_private_debug_data[ASD_DEBUG_DATA_SIZE];
+} cam_asd_exif_debug_t;
+
+typedef struct {
+ int32_t bg_stats_buffer_size;
+ int32_t bg_config_buffer_size;
+ char stats_buffer_private_debug_data[STATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_stats_buffer_exif_debug_t;
+
+typedef struct {
+ int32_t be_stats_buffer_size;
+ int32_t be_config_buffer_size;
+ char bestats_buffer_private_debug_data[BESTATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_bestats_buffer_exif_debug_t;
+
+typedef struct {
+ int32_t bhist_stats_buffer_size;
+ char bhist_private_debug_data[BHIST_STATS_DEBUG_DATA_SIZE];
+} cam_bhist_buffer_exif_debug_t;
+
+/* 3A version*/
+typedef struct {
+ uint16_t major_version;
+ uint16_t minor_version;
+ uint16_t patch_version;
+ uint16_t new_feature_des;
+} cam_q3a_version_t;
+
+typedef struct {
+ int32_t tuning_info_buffer_size;
+ char tuning_info_private_debug_data[TUNING_INFO_DEBUG_DATA_SIZE];
+} cam_q3a_tuning_info_t;
+
+typedef struct {
+ uint32_t tuning_data_version;
+ size_t tuning_sensor_data_size;
+ size_t tuning_vfe_data_size;
+ size_t tuning_cpp_data_size;
+ size_t tuning_cac_data_size;
+ size_t tuning_cac_data_size2;
+ size_t tuning_mod3_data_size;
+ uint8_t data[TUNING_DATA_MAX];
+}tuning_params_t;
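A sketch of how the fixed *_OFFSET / *_DATA_MAX defines near the top of this header partition the flat data[] blob into per-module regions; the VFE example and clamping policy are illustrative.

#include <string.h>

static void tuning_fill_vfe_region(tuning_params_t *t,
        const void *vfe_blob, size_t vfe_len)
{
    if (vfe_len > TUNING_VFE_DATA_MAX)
        vfe_len = TUNING_VFE_DATA_MAX;   /* clamp to the reserved slot */
    memcpy(t->data + TUNING_VFE_DATA_OFFSET, vfe_blob, vfe_len);
    t->tuning_vfe_data_size = vfe_len;
    t->tuning_data_version  = TUNING_DATA_VERSION;
}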
+
+typedef struct {
+ int32_t event_type;
+ cam_dimension_t dim;
+ size_t size;
+ char path[QCAMERA_MAX_FILEPATH_LENGTH];
+ cam_format_t picture_format;
+} cam_int_evt_params_t;
+
+typedef struct {
+ uint8_t private_isp_data[MAX_ISP_DATA_SIZE];
+} cam_chromatix_lite_isp_t;
+
+typedef struct {
+ uint8_t private_pp_data[MAX_PP_DATA_SIZE];
+} cam_chromatix_lite_pp_t;
+
+typedef struct {
+ uint8_t private_stats_data[MAX_AE_STATS_DATA_SIZE];
+} cam_chromatix_lite_ae_stats_t;
+
+typedef struct {
+ uint8_t private_stats_data[MAX_AWB_STATS_DATA_SIZE];
+} cam_chromatix_lite_awb_stats_t;
+
+typedef struct {
+ uint8_t private_stats_data[MAX_AF_STATS_DATA_SIZE];
+} cam_chromatix_lite_af_stats_t;
+
+typedef struct {
+ uint8_t private_stats_data[MAX_ASD_STATS_DATA_SIZE];
+} cam_chromatix_lite_asd_stats_t;
+
+typedef struct {
+ uint32_t min_buffers;
+ uint32_t max_buffers;
+} cam_buffer_info_t;
+
+typedef enum {
+ /* Standalone camera (won't be linked) */
+ CAM_TYPE_STANDALONE=0,
+ /* Main camera of the related cam subsystem which controls
+ HW sync at sensor level*/
+ CAM_TYPE_MAIN,
+ /* Aux camera of the related cam subsystem */
+ CAM_TYPE_AUX
+} cam_sync_type_t;
+
+typedef struct {
+ cam_dimension_t stream_sizes[MAX_NUM_STREAMS];
+ uint32_t num_streams;
+ cam_stream_type_t type[MAX_NUM_STREAMS];
+ cam_feature_mask_t postprocess_mask[MAX_NUM_STREAMS];
+ cam_buffer_info_t buffer_info;
+ cam_is_type_t is_type[MAX_NUM_STREAMS];
+ cam_hfr_mode_t hfr_mode;
+ cam_format_t format[MAX_NUM_STREAMS];
+ uint32_t buf_alignment;
+ uint32_t min_stride;
+ uint32_t min_scanline;
+ uint8_t batch_size;
+ cam_sync_type_t sync_type;
+} cam_stream_size_info_t;
+
+
+typedef enum {
+ CAM_INTF_OVERWRITE_MINI_CHROMATIX_OFFLINE,
+ CAM_INTF_OVERWRITE_ISP_HW_DATA_OFFLINE,
+ CAM_INTF_OVERWRITE_MINI_CHROMATIX_ONLINE,
+ CAM_INTF_OVERWRITE_ISP_HW_DATA_ONLINE,
+ CAM_INTF_OVERWRITE_MAX,
+} cam_intf_overwrite_type_t;
+
+typedef struct {
+ uint8_t lds_enabled;
+ float rnr_sampling_factor;
+} cam_img_hysterisis_info_t;
+
+typedef struct {
+ /* dynamic feature enablement */
+ uint64_t dyn_feature_mask;
+ /* input frame count for stacking */
+ uint32_t input_count;
+ /* reserved for future use */
+ uint8_t reserved[32];
+} cam_dyn_img_data_t;
+
+typedef struct {
+ cam_intf_overwrite_type_t overwrite_type;
+ char isp_hw_data_list[4096]; /*add upper bound memory, customer to fill*/
+ char chromatix_data_overwrite[4096]; /*add bound memory, customer fill*/
+} cam_hw_data_overwrite_t;
+
+typedef struct {
+ uint32_t num_streams;
+ uint32_t streamID[MAX_NUM_STREAMS];
+} cam_stream_ID_t;
+
+/*CAC Message posted during pipeline*/
+typedef struct {
+ uint32_t frame_id;
+ int32_t buf_idx;
+} cam_cac_info_t;
+
+typedef struct
+{
+ uint32_t id; /* Frame ID */
+ uint64_t timestamp; /* Time stamp */
+ uint32_t distance_in_mm; /* Distance of object in ROIs, in millimeters */
+ uint32_t confidence; /* Confidence in distance, from 0 (no confidence) to 1024 (max) */
+ uint32_t status; /* Status of DCRF library execution call */
+ cam_rect_t focused_roi; /* ROI's for which distance is estimated */
+ uint32_t focused_x; /* Focus location X inside ROI with distance estimation */
+ uint32_t focused_y; /* Focus location Y inside ROI with distance estimation */
+} cam_dcrf_result_t;
+
+typedef struct {
+ uint32_t frame_id;
+ uint32_t num_streams;
+ uint32_t stream_id[MAX_NUM_STREAMS];
+} cam_buf_divert_info_t;
+
+typedef struct {
+ uint8_t is_stats_valid; /* if histogram data is valid */
+ cam_hist_stats_t stats_data; /* histogram data */
+
+ uint8_t is_faces_valid; /* if face detection data is valid */
+ cam_face_detection_data_t faces_data; /* face detection result */
+
+ uint8_t is_focus_valid; /* if focus data is valid */
+ cam_auto_focus_data_t focus_data; /* focus data */
+
+ uint8_t is_crop_valid; /* if crop data is valid */
+ cam_crop_data_t crop_data; /* crop data */
+
+ uint8_t is_prep_snapshot_done_valid; /* if prep snapshot done is valid */
+ cam_prep_snapshot_state_t prep_snapshot_done_state; /* prepare snapshot done state */
+
+ uint8_t is_cac_valid; /* if cac info is valid */
+ cam_cac_info_t cac_info; /* cac info */
+
+ /* Hysteresis data from Img modules */
+ uint8_t is_hyst_info_valid; /* if hyst info is valid */
+ cam_img_hysterisis_info_t img_hyst_info; /* hyst info */
+
+ /* if good frame idx range is valid */
+ uint8_t is_good_frame_idx_range_valid;
+ /* good frame idx range, make sure:
+ * 1. good_frame_idx_range.min_frame_idx > current_frame_idx
+ * 2. good_frame_idx_range.min_frame_idx - current_frame_idx < 100 */
+ cam_frame_idx_range_t good_frame_idx_range;
+
+ cam_asd_decision_t cam_asd_info;
+
+ char private_metadata[MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES];
+
+ /* AE parameters */
+ uint8_t is_3a_params_valid;
+ cam_3a_params_t cam_3a_params;
+
+ /* AE exif debug parameters */
+ uint8_t is_ae_exif_debug_valid;
+ cam_ae_exif_debug_t ae_exif_debug_params;
+
+ /* AWB exif debug parameters */
+ uint8_t is_awb_exif_debug_valid;
+ cam_awb_exif_debug_t awb_exif_debug_params;
+
+ /* AF exif debug parameters */
+ uint8_t is_af_exif_debug_valid;
+ cam_af_exif_debug_t af_exif_debug_params;
+
+ /* ASD exif debug parameters */
+ uint8_t is_asd_exif_debug_valid;
+ cam_asd_exif_debug_t asd_exif_debug_params;
+
+ /* Stats buffer exif debug parameters */
+ uint8_t is_stats_buffer_exif_debug_valid;
+ cam_stats_buffer_exif_debug_t stats_buffer_exif_debug_params;
+
+ /* BE stats buffer exif debug parameters */
+ uint8_t is_bestats_buffer_exif_debug_valid;
+ cam_bestats_buffer_exif_debug_t bestats_buffer_exif_debug_params;
+
+ /* Bhist exif debug parameters. */
+ uint8_t is_bhist_exif_debug_valid;
+ cam_bhist_buffer_exif_debug_t bhist_exif_debug_params;
+
+ /* AWB parameters */
+ uint8_t is_awb_params_valid;
+ cam_awb_params_t awb_params;
+
+ /* sensor parameters */
+ uint8_t is_sensor_params_valid;
+ cam_sensor_params_t sensor_params;
+
+ /* Meta valid params */
+ uint8_t is_meta_valid;
+ cam_meta_valid_t meta_valid_params;
+
+ /*Tuning Data*/
+ uint8_t is_tuning_params_valid;
+ tuning_params_t tuning_params;
+
+ uint8_t is_chromatix_lite_isp_valid;
+ cam_chromatix_lite_isp_t chromatix_lite_isp_data;
+
+ uint8_t is_chromatix_lite_pp_valid;
+ cam_chromatix_lite_pp_t chromatix_lite_pp_data;
+
+ uint8_t is_chromatix_lite_ae_stats_valid;
+ cam_chromatix_lite_ae_stats_t chromatix_lite_ae_stats_data;
+
+ uint8_t is_chromatix_lite_awb_stats_valid;
+ cam_chromatix_lite_awb_stats_t chromatix_lite_awb_stats_data;
+
+ uint8_t is_chromatix_lite_af_stats_valid;
+ cam_chromatix_lite_af_stats_t chromatix_lite_af_stats_data;
+
+ uint8_t is_dcrf_result_valid;
+ cam_dcrf_result_t dcrf_result;
+
+ /* Dynamic feature enablement from postprocessing modules */
+ uint8_t is_dyn_img_data_valid;
+ cam_dyn_img_data_t dyn_img_data;
+
+} cam_metadata_info_t;
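A small sketch of the valid-flag pattern used throughout this struct: each payload is only meaningful when its companion is_*_valid flag is set. The consumer function is hypothetical.

static void handle_metadata(const cam_metadata_info_t *meta)
{
    if (meta->is_faces_valid) {
        /* Safe to read the face detection payload. */
        uint8_t num_faces = meta->faces_data.num_faces_detected;
        (void)num_faces;
    }
    if (meta->is_focus_valid &&
            meta->focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED) {
        /* Autofocus converged and locked for this frame. */
    }
}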
+
+typedef enum {
+ CAM_INTF_PARM_HAL_VERSION = 0x1,
+
+ /* Overall mode of 3A control routines. We need to have this parameter
+ * because not all android.control.* have an OFF option, for example,
+ * AE_FPS_Range, aePrecaptureTrigger */
+ CAM_INTF_META_MODE,
+ /* Whether AE is currently updating the sensor exposure and sensitivity
+ * fields */
+ CAM_INTF_META_AEC_MODE,
+ CAM_INTF_PARM_WHITE_BALANCE,
+ CAM_INTF_PARM_FOCUS_MODE,
+
+ /* common between HAL1 and HAL3 */
+ CAM_INTF_PARM_ANTIBANDING,
+ CAM_INTF_PARM_EXPOSURE_COMPENSATION,
+ CAM_INTF_PARM_EV_STEP,
+ CAM_INTF_PARM_AEC_LOCK,
+ CAM_INTF_PARM_FPS_RANGE, /* 10 */
+ CAM_INTF_PARM_AWB_LOCK,
+ CAM_INTF_PARM_EFFECT,
+ CAM_INTF_PARM_BESTSHOT_MODE,
+ CAM_INTF_PARM_DIS_ENABLE,
+ CAM_INTF_PARM_LED_MODE,
+ CAM_INTF_META_HISTOGRAM,
+ CAM_INTF_META_FACE_DETECTION,
+ /* Whether optical image stabilization is enabled. */
+ CAM_INTF_META_LENS_OPT_STAB_MODE,
+ /* specific to HAl1 */
+ CAM_INTF_META_AUTOFOCUS_DATA,
+ CAM_INTF_PARM_QUERY_FLASH4SNAP, /* 20 */
+ CAM_INTF_PARM_EXPOSURE,
+ CAM_INTF_PARM_SHARPNESS,
+ CAM_INTF_PARM_CONTRAST,
+ CAM_INTF_PARM_SATURATION,
+ CAM_INTF_PARM_BRIGHTNESS,
+ CAM_INTF_PARM_ISO,
+ CAM_INTF_PARM_ZOOM,
+ CAM_INTF_PARM_ROLLOFF,
+ CAM_INTF_PARM_MODE, /* camera mode */
+ CAM_INTF_PARM_AEC_ALGO_TYPE, /* 30 */ /* auto exposure algorithm */
+ CAM_INTF_PARM_FOCUS_ALGO_TYPE, /* focus algorithm */
+ CAM_INTF_PARM_AEC_ROI,
+ CAM_INTF_PARM_AF_ROI,
+ CAM_INTF_PARM_SCE_FACTOR,
+ CAM_INTF_PARM_FD,
+ CAM_INTF_PARM_MCE,
+ CAM_INTF_PARM_HFR,
+ CAM_INTF_PARM_REDEYE_REDUCTION,
+ CAM_INTF_PARM_WAVELET_DENOISE,
+ CAM_INTF_PARM_TEMPORAL_DENOISE, /* 40 */
+ CAM_INTF_PARM_HISTOGRAM,
+ CAM_INTF_PARM_ASD_ENABLE,
+ CAM_INTF_PARM_RECORDING_HINT,
+ CAM_INTF_PARM_HDR,
+ CAM_INTF_PARM_MAX_DIMENSION,
+ CAM_INTF_PARM_RAW_DIMENSION,
+ CAM_INTF_PARM_FRAMESKIP,
+ CAM_INTF_PARM_ZSL_MODE, /* indicating if it's running in ZSL mode */
+ CAM_INTF_PARM_BURST_NUM,
+ CAM_INTF_PARM_RETRO_BURST_NUM, /* 50 */
+ CAM_INTF_PARM_BURST_LED_ON_PERIOD,
+ CAM_INTF_PARM_HDR_NEED_1X, /* if HDR needs 1x output */
+ CAM_INTF_PARM_LOCK_CAF,
+ CAM_INTF_PARM_VIDEO_HDR,
+ CAM_INTF_PARM_SENSOR_HDR,
+ CAM_INTF_PARM_ROTATION,
+ CAM_INTF_PARM_SCALE,
+ CAM_INTF_PARM_VT, /* indicating if it's a Video Call Application */
+ CAM_INTF_META_CROP_DATA,
+ CAM_INTF_META_PREP_SNAPSHOT_DONE, /* 60 */
+ CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
+ CAM_INTF_META_ASD_HDR_SCENE_DATA,
+ CAM_INTF_META_ASD_SCENE_INFO,
+ CAM_INTF_META_CURRENT_SCENE,
+ CAM_INTF_META_AEC_INFO,
+ CAM_INTF_META_SENSOR_INFO,
+ CAM_INTF_META_CHROMATIX_LITE_ISP,
+ CAM_INTF_META_CHROMATIX_LITE_PP,
+ CAM_INTF_META_CHROMATIX_LITE_AE,
+ CAM_INTF_META_CHROMATIX_LITE_AWB, /* 70 */
+ CAM_INTF_META_CHROMATIX_LITE_AF,
+ CAM_INTF_META_CHROMATIX_LITE_ASD,
+ CAM_INTF_META_EXIF_DEBUG_AE,
+ CAM_INTF_META_EXIF_DEBUG_AWB,
+ CAM_INTF_META_EXIF_DEBUG_AF,
+ CAM_INTF_META_EXIF_DEBUG_ASD,
+ CAM_INTF_META_EXIF_DEBUG_STATS,
+ CAM_INTF_META_EXIF_DEBUG_BESTATS,
+ CAM_INTF_META_EXIF_DEBUG_BHIST,
+ CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
+ CAM_INTF_PARM_GET_CHROMATIX,
+ CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
+ CAM_INTF_PARM_SET_AUTOFOCUSTUNING, /* 80 */
+ CAM_INTF_PARM_GET_AFTUNE,
+ CAM_INTF_PARM_SET_RELOAD_AFTUNE,
+ CAM_INTF_PARM_SET_VFE_COMMAND,
+ CAM_INTF_PARM_SET_PP_COMMAND,
+ CAM_INTF_PARM_TINTLESS,
+ CAM_INTF_PARM_LONGSHOT_ENABLE,
+ CAM_INTF_PARM_RDI_MODE,
+ CAM_INTF_PARM_CDS_MODE,
+ CAM_INTF_PARM_TONE_MAP_MODE,
+ CAM_INTF_PARM_CAPTURE_FRAME_CONFIG, /* 90 */
+ CAM_INTF_PARM_DUAL_LED_CALIBRATION,
+ CAM_INTF_PARM_ADV_CAPTURE_MODE,
+
+ /* stream based parameters */
+ CAM_INTF_PARM_DO_REPROCESS,
+ CAM_INTF_PARM_SET_BUNDLE,
+ CAM_INTF_PARM_STREAM_FLIP,
+ CAM_INTF_PARM_GET_OUTPUT_CROP,
+
+ CAM_INTF_PARM_EZTUNE_CMD,
+ CAM_INTF_PARM_INT_EVT,
+
+ /* specific to HAL3 */
+ /* Whether the metadata maps to a valid frame number */
+ CAM_INTF_META_FRAME_NUMBER_VALID,
+ /* Whether the urgent metadata maps to a valid frame number */
+ CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,
+ /* Whether the stream buffer corresponding this frame is dropped or not */
+ CAM_INTF_META_FRAME_DROPPED, /* 100 */
+ /* COLOR CORRECTION.*/
+ CAM_INTF_META_COLOR_CORRECT_MODE,
+ /* A transform matrix to chromatically adapt pixels in the CIE XYZ (1931)
+ * color space from the scene illuminant to the sRGB-standard D65-illuminant. */
+ CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
+ /*Color channel gains in the Bayer raw domain in the order [RGeGoB]*/
+ CAM_INTF_META_COLOR_CORRECT_GAINS,
+ /*The best fit color transform matrix calculated by the stats*/
+ CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM,
+ /*The best fit color channels gains calculated by the stats*/
+ CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,
+ /* CONTROL */
+ /* A frame counter set by the framework. Must be maintained unchanged in
+ * output frame. */
+ CAM_INTF_META_FRAME_NUMBER,
+ /* A frame counter set by the framework. Must be maintained unchanged in
+ * output frame. */
+ CAM_INTF_META_URGENT_FRAME_NUMBER,
+ /*Number of streams and size of streams in current configuration*/
+ CAM_INTF_META_STREAM_INFO,
+ /* List of areas to use for metering */
+ CAM_INTF_META_AEC_ROI,
+ /* Whether the HAL must trigger precapture metering.*/
+ CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, /* 110 */
+ /* The ID sent with the latest CAMERA2_TRIGGER_PRECAPTURE_METERING call */
+ /* Current state of AE algorithm */
+ CAM_INTF_META_AEC_STATE,
+ /* List of areas to use for focus estimation */
+ CAM_INTF_META_AF_ROI,
+ /* Whether the HAL must trigger autofocus. */
+ CAM_INTF_META_AF_TRIGGER,
+ /* Current state of AF algorithm */
+ CAM_INTF_META_AF_STATE,
+ /* List of areas to use for illuminant estimation */
+ CAM_INTF_META_AWB_REGIONS,
+ /* Current state of AWB algorithm */
+ CAM_INTF_META_AWB_STATE,
+ /*Whether black level compensation is frozen or free to vary*/
+ CAM_INTF_META_BLACK_LEVEL_LOCK,
+ /* Information to 3A routines about the purpose of this capture, to help
+ * decide optimal 3A strategy */
+ CAM_INTF_META_CAPTURE_INTENT,
+ /* DEMOSAIC */
+ /* Controls the quality of the demosaicing processing */
+ CAM_INTF_META_DEMOSAIC,
+ /* EDGE */
+ /* Operation mode for edge enhancement */
+ CAM_INTF_META_EDGE_MODE, /* 120 */
+ /* Control the amount of edge enhancement applied to the images.*/
+ /* 1-10; 10 is maximum sharpening */
+ CAM_INTF_META_SHARPNESS_STRENGTH,
+ /* FLASH */
+ /* Power for flash firing/torch, 10 is max power; 0 is no flash. Linear */
+ CAM_INTF_META_FLASH_POWER,
+ /* Firing time of flash relative to start of exposure, in nanoseconds*/
+ CAM_INTF_META_FLASH_FIRING_TIME,
+ /* Current state of the flash unit */
+ CAM_INTF_META_FLASH_STATE,
+ /* GEOMETRIC */
+ /* Operating mode of geometric correction */
+ CAM_INTF_META_GEOMETRIC_MODE,
+ /* Control the amount of shading correction applied to the images */
+ CAM_INTF_META_GEOMETRIC_STRENGTH,
+ /* HOT PIXEL */
+ /* Set operational mode for hot pixel correction */
+ CAM_INTF_META_HOTPIXEL_MODE,
+ /* LENS */
+ /* Size of the lens aperture */
+ CAM_INTF_META_LENS_APERTURE,
+ /* State of lens neutral density filter(s) */
+ CAM_INTF_META_LENS_FILTERDENSITY,
+ /* Lens optical zoom setting */
+ CAM_INTF_META_LENS_FOCAL_LENGTH, /* 130 */
+ /* Distance to plane of sharpest focus, measured from frontmost surface
+ * of the lens */
+ CAM_INTF_META_LENS_FOCUS_DISTANCE,
+ /* The range of scene distances that are in sharp focus (depth of field) */
+ CAM_INTF_META_LENS_FOCUS_RANGE,
+ /*Whether the hal needs to output the lens shading map*/
+ CAM_INTF_META_LENS_SHADING_MAP_MODE,
+ /* Current lens status */
+ CAM_INTF_META_LENS_STATE,
+ /* NOISE REDUCTION */
+ /* Mode of operation for the noise reduction algorithm */
+ CAM_INTF_META_NOISE_REDUCTION_MODE,
+ /* Control the amount of noise reduction applied to the images.
+ * 1-10; 10 is max noise reduction */
+ CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
+ /* SCALER */
+ /* Top-left corner and width of the output region to select from the active
+ * pixel array */
+ CAM_INTF_META_SCALER_CROP_REGION,
+ /* The estimated scene illumination lighting frequency */
+ CAM_INTF_META_SCENE_FLICKER,
+ /* SENSOR */
+ /* Duration each pixel is exposed to light, in nanoseconds */
+ CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+ /* Duration from start of frame exposure to start of next frame exposure,
+ * in nanoseconds */
+ CAM_INTF_META_SENSOR_FRAME_DURATION, /* 140 */
+ /* Gain applied to image data. Must be implemented through analog gain only
+ * if set to values below 'maximum analog sensitivity'. */
+ CAM_INTF_META_SENSOR_SENSITIVITY,
+ /* Time at start of exposure of first row */
+ CAM_INTF_META_SENSOR_TIMESTAMP,
+ /* Duration b/w start of first row exposure and the start of last
+ row exposure in nanoseconds */
+ CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
+ /* SHADING */
+ /* Quality of lens shading correction applied to the image data */
+ CAM_INTF_META_SHADING_MODE,
+ /* Control the amount of shading correction applied to the images.
+ * unitless: 1-10; 10 is full shading compensation */
+ CAM_INTF_META_SHADING_STRENGTH,
+ /* STATISTICS */
+ /* State of the face detector unit */
+ CAM_INTF_META_STATS_FACEDETECT_MODE,
+ /* Operating mode for histogram generation */
+ CAM_INTF_META_STATS_HISTOGRAM_MODE,
+ /* Operating mode for sharpness map generation */
+ CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+    /* A 3-channel sharpness map, based on the raw sensor data.
+ * If only a monochrome sharpness map is supported, all channels
+ * should have the same data
+ */
+ CAM_INTF_META_STATS_SHARPNESS_MAP,
+
+ /* TONEMAP */
+ /* Tone map mode */
+ CAM_INTF_META_TONEMAP_MODE, /* 150 */
+ /* Table mapping RGB input values to output values */
+ CAM_INTF_META_TONEMAP_CURVES,
+
+ CAM_INTF_META_FLASH_MODE,
+ /* 2D array of gain factors for each color channel that was used to
+ * compensate for lens shading for this frame */
+ CAM_INTF_META_LENS_SHADING_MAP,
+ CAM_INTF_META_PRIVATE_DATA,
+ CAM_INTF_PARM_STATS_DEBUG_MASK,
+ CAM_INTF_PARM_STATS_AF_PAAF,
+    /* Indicates the stream IDs of all the requested buffers */
+ CAM_INTF_META_STREAM_ID,
+ CAM_INTF_PARM_FOCUS_BRACKETING,
+ CAM_INTF_PARM_FLASH_BRACKETING,
+ CAM_INTF_PARM_GET_IMG_PROP, /* 160 */
+ CAM_INTF_META_JPEG_GPS_COORDINATES,
+ CAM_INTF_META_JPEG_GPS_PROC_METHODS,
+ CAM_INTF_META_JPEG_GPS_TIMESTAMP,
+ CAM_INTF_META_JPEG_ORIENTATION,
+ CAM_INTF_META_JPEG_QUALITY,
+ CAM_INTF_META_JPEG_THUMB_QUALITY,
+ CAM_INTF_META_JPEG_THUMB_SIZE,
+
+ CAM_INTF_META_TEST_PATTERN_DATA,
+ /* DNG file support */
+ CAM_INTF_META_PROFILE_TONE_CURVE,
+ CAM_INTF_META_NEUTRAL_COL_POINT, /* 170 */
+
+ /* CAC */
+ CAM_INTF_META_CAC_INFO,
+ CAM_INTF_PARM_CAC,
+ CAM_INTF_META_IMG_HYST_INFO,
+
+ /* trigger for all modules to read the debug/log level properties */
+ CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
+
+ /* OTP : WB gr/gb */
+ CAM_INTF_META_OTP_WB_GRGB,
+ /* LED override for EZTUNE */
+ CAM_INTF_META_LED_MODE_OVERRIDE,
+ /* auto lens position info */
+ CAM_INTF_META_FOCUS_POSITION,
+ /* Manual exposure time */
+ CAM_INTF_PARM_EXPOSURE_TIME,
+ /* AWB meta data info */
+ CAM_INTF_META_AWB_INFO,
+ /* Manual lens position info */
+ CAM_INTF_PARM_MANUAL_FOCUS_POS, /* 180 */
+ /* Manual White balance gains */
+ CAM_INTF_PARM_WB_MANUAL,
+ /* Offline Data Overwrite */
+ CAM_INTF_PARM_HW_DATA_OVERWRITE,
+ /* IMG LIB reprocess debug section */
+ CAM_INTF_META_IMGLIB, /* cam_intf_meta_imglib_t */
+ /* OEM specific parameters */
+ CAM_INTF_PARM_CUSTOM,
+ /* parameters added for related cameras */
+ /* fetch calibration info for related cam subsystem */
+ CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION,
+ /* focal length ratio info */
+ CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
+ /* crop for binning & FOV adjust */
+ CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
+ /* crop for trimming edge pixels */
+ CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
+ /* crop for FOV adjust and zoom */
+ CAM_INTF_META_SNAP_CROP_INFO_ISP,
+ /* crop for image-stabilization and zoom */
+ CAM_INTF_META_SNAP_CROP_INFO_CPP, /* 190 */
+ /* parameter for enabling DCRF */
+ CAM_INTF_PARM_DCRF,
+ /* metadata tag for DCRF info */
+ CAM_INTF_META_DCRF,
+ /* FLIP mode parameter*/
+ CAM_INTF_PARM_FLIP,
+ /*Frame divert info from ISP*/
+ CAM_INTF_BUF_DIVERT_INFO,
+ /* Use AV timer */
+ CAM_INTF_META_USE_AV_TIMER,
+ CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
+ /* Special event to request stream frames*/
+ CAM_INTF_PARM_REQUEST_FRAMES,
+ /* Special event to request operational mode*/
+ CAM_INTF_PARM_REQUEST_OPS_MODE,
+ /*Black level parameters*/
+ CAM_INTF_META_LDAF_EXIF,
+ CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN,
+ CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, /* 200 */
+ CAM_INTF_META_CDS_DATA,
+ /*3A low light level information*/
+ CAM_INTF_META_LOW_LIGHT,
+ /* dynamic feature detection */
+    CAM_INTF_META_IMG_DYN_FEAT,
+ /*Parameter entry to communicate manual
+ capture type*/
+ CAM_INTF_PARM_MANUAL_CAPTURE_TYPE,
+ /*AF state change detected by AF module*/
+ CAM_INTF_AF_STATE_TRANSITION,
+ /* face recognition */
+ CAM_INTF_META_FACE_RECOG,
+ /* face blink detection */
+ CAM_INTF_META_FACE_BLINK,
+ /* face gaze detection */
+ CAM_INTF_META_FACE_GAZE,
+ /* face smile detection */
+ CAM_INTF_META_FACE_SMILE,
+ /* face landmark detection */
+ CAM_INTF_META_FACE_LANDMARK, /* 210 */
+ /* face contour detection */
+ CAM_INTF_META_FACE_CONTOUR,
+ /* Whether EIS is enabled */
+ CAM_INTF_META_VIDEO_STAB_MODE,
+ /* Touch exposure compensation (EV) status */
+ CAM_INTF_META_TOUCH_AE_RESULT,
+ /* Param for updating initial exposure index value*/
+ CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX,
+    /* Gain applied post raw capture.
+ ISP digital gain */
+ CAM_INTF_META_ISP_SENSITIVITY,
+ /* Param for enabling instant aec*/
+ CAM_INTF_PARM_INSTANT_AEC,
+ /* Param for tracking previous reprocessing activity */
+ CAM_INTF_META_REPROCESS_FLAGS,
+ /* Param of cropping information for JPEG encoder */
+ CAM_INTF_PARM_JPEG_ENCODE_CROP,
+ /* Param of scaling information for JPEG encoder */
+ CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
+ /*Param for updating Quadra CFA mode */
+ CAM_INTF_PARM_QUADRA_CFA,
+ CAM_INTF_PARM_MAX
+} cam_intf_parm_type_t;
+
+typedef struct {
+ uint32_t forced;
+ union {
+ uint32_t force_linecount_value;
+ float force_gain_value;
+ float force_snap_exp_value;
+ float force_exp_value;
+ uint32_t force_snap_linecount_value;
+ float force_snap_gain_value;
+ } u;
+} cam_ez_force_params_t;
+
+typedef struct {
+ float cam_black_level[4];
+} cam_black_level_metadata_t;
+
+typedef enum {
+ CAM_EZTUNE_CMD_STATUS,
+ CAM_EZTUNE_CMD_AEC_ENABLE,
+ CAM_EZTUNE_CMD_AWB_ENABLE,
+ CAM_EZTUNE_CMD_AF_ENABLE,
+ CAM_EZTUNE_CMD_AEC_FORCE_LINECOUNT,
+ CAM_EZTUNE_CMD_AEC_FORCE_GAIN,
+ CAM_EZTUNE_CMD_AEC_FORCE_EXP,
+ CAM_EZTUNE_CMD_AEC_FORCE_SNAP_LC,
+ CAM_EZTUNE_CMD_AEC_FORCE_SNAP_GAIN,
+ CAM_EZTUNE_CMD_AEC_FORCE_SNAP_EXP,
+ CAM_EZTUNE_CMD_AWB_MODE,
+ CAM_EZTUNE_CMD_AWB_FORCE_DUAL_LED_IDX,
+} cam_eztune_cmd_type_t;
+
+typedef struct {
+ cam_eztune_cmd_type_t cmd;
+ union {
+ int32_t running;
+ int32_t aec_enable;
+ int32_t awb_enable;
+ int32_t af_enable;
+ cam_ez_force_params_t ez_force_param;
+ int32_t awb_mode;
+ int32_t ez_force_dual_led_idx;
+ } u;
+} cam_eztune_cmd_data_t;
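
As a rough illustration of how this command structure might be filled (assuming
cam_intf.h, which pulls in these type definitions, is on the include path; the
transport that carries the command to the backend is not shown), enabling the AEC
module during eztune could look like:

    #include <string.h>
    #include "cam_intf.h"   /* illustrative include; brings in cam_eztune_cmd_data_t */

    static void fill_aec_enable_cmd(cam_eztune_cmd_data_t *cmd)
    {
        memset(cmd, 0, sizeof(*cmd));
        cmd->cmd = CAM_EZTUNE_CMD_AEC_ENABLE;  /* select the command type */
        cmd->u.aec_enable = 1;                 /* fill the matching union member */
    }
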
+
+
+/*****************************************************************************
+ * Code for HAL3 data types *
+ ****************************************************************************/
+typedef enum {
+ CAM_INTF_METADATA_MAX
+} cam_intf_metadata_type_t;
+
+typedef enum {
+ CAM_INTENT_CUSTOM,
+ CAM_INTENT_PREVIEW,
+ CAM_INTENT_STILL_CAPTURE,
+ CAM_INTENT_VIDEO_RECORD,
+ CAM_INTENT_VIDEO_SNAPSHOT,
+ CAM_INTENT_ZERO_SHUTTER_LAG,
+ CAM_INTENT_MAX,
+} cam_intent_t;
+
+typedef enum {
+ /* Full application control of pipeline. All 3A routines are disabled,
+ * no other settings in android.control.* have any effect */
+ CAM_CONTROL_OFF,
+ /* Use settings for each individual 3A routine. Manual control of capture
+ * parameters is disabled. All controls in android.control.* besides sceneMode
+ * take effect */
+ CAM_CONTROL_AUTO,
+ /* Use specific scene mode. Enabling this disables control.aeMode,
+ * control.awbMode and control.afMode controls; the HAL must ignore those
+ * settings while USE_SCENE_MODE is active (except for FACE_PRIORITY scene mode).
+ * Other control entries are still active. This setting can only be used if
+ * availableSceneModes != UNSUPPORTED. TODO: Should we remove this and handle this
+ * in HAL ?*/
+ CAM_CONTROL_USE_SCENE_MODE,
+ CAM_CONTROL_MAX
+} cam_control_mode_t;
+
+typedef enum {
+ /* Use the android.colorCorrection.transform matrix to do color conversion */
+ CAM_COLOR_CORRECTION_TRANSFORM_MATRIX,
+ /* Must not slow down frame rate relative to raw bayer output */
+ CAM_COLOR_CORRECTION_FAST,
+ /* Frame rate may be reduced by high quality */
+ CAM_COLOR_CORRECTION_HIGH_QUALITY,
+} cam_color_correct_mode_t;
+
+typedef enum {
+ CAM_COLOR_CORRECTION_ABERRATION_OFF,
+ CAM_COLOR_CORRECTION_ABERRATION_FAST,
+ CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY,
+ CAM_COLOR_CORRECTION_ABERRATION_MAX
+} cam_aberration_mode_t;
+
+#define CC_MATRIX_ROWS 3
+#define CC_MATRIX_COLS 3
+
+typedef struct {
+ /* 3x3 float matrix in row-major order. each element is in range of (0, 1) */
+ cam_rational_type_t transform_matrix[CC_MATRIX_ROWS][CC_MATRIX_COLS];
+} cam_color_correct_matrix_t;
+
+#define CAM_FOCAL_LENGTHS_MAX 1
+#define CAM_APERTURES_MAX 1
+#define CAM_FILTER_DENSITIES_MAX 1
+#define CAM_MAX_MAP_HEIGHT 6
+#define CAM_MAX_MAP_WIDTH 6
+#define CAM_MAX_SHADING_MAP_WIDTH 17
+#define CAM_MAX_SHADING_MAP_HEIGHT 13
+#define CAM_MAX_TONEMAP_CURVE_SIZE 512
+#define CAM_MAX_FLASH_BRACKETING 5
+
+typedef struct {
+ /* A 1D array of pairs of floats.
+ * Mapping a 0-1 input range to a 0-1 output range.
+ * The input range must be monotonically increasing with N,
+ * and values between entries should be linearly interpolated.
+ * For example, if the array is: [0.0, 0.0, 0.3, 0.5, 1.0, 1.0],
+ * then the input->output mapping for a few sample points would be:
+ * 0 -> 0, 0.15 -> 0.25, 0.3 -> 0.5, 0.5 -> 0.64 */
+ float tonemap_points[CAM_MAX_TONEMAP_CURVE_SIZE][2];
+} cam_tonemap_curve_t;
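
The comment above fully specifies the curve semantics; a minimal sketch of the
implied piecewise-linear evaluation (assuming 'n' is the caller-supplied
tonemap_points_cnt and the input coordinates are monotonically increasing) is:

    /* Illustrative sketch: evaluate a tonemap curve at 'in' (0..1). */
    static float eval_tonemap_curve(const cam_tonemap_curve_t *c, size_t n, float in)
    {
        size_t i;
        if (n == 0)
            return in;                            /* no curve: identity mapping */
        if (in <= c->tonemap_points[0][0])
            return c->tonemap_points[0][1];
        for (i = 1; i < n; i++) {
            float x0 = c->tonemap_points[i - 1][0], y0 = c->tonemap_points[i - 1][1];
            float x1 = c->tonemap_points[i][0],     y1 = c->tonemap_points[i][1];
            if (in <= x1)
                return (x1 > x0) ? y0 + (in - x0) * (y1 - y0) / (x1 - x0) : y1;
        }
        return c->tonemap_points[n - 1][1];       /* clamp above the last point */
    }

With the sample curve from the comment, this returns 0.25 for an input of 0.15 and
roughly 0.64 for an input of 0.5.
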
+
+typedef struct {
+ size_t tonemap_points_cnt;
+ cam_tonemap_curve_t curves[3];
+} cam_rgb_tonemap_curves;
+
+typedef struct {
+ size_t tonemap_points_cnt;
+ cam_tonemap_curve_t curve;
+} cam_profile_tone_curve;
+
+#define NEUTRAL_COL_POINTS 3
+
+typedef struct {
+ cam_rational_type_t neutral_col_point[NEUTRAL_COL_POINTS];
+} cam_neutral_col_point_t;
+
+typedef enum {
+ OFF,
+ FAST,
+ QUALITY,
+} cam_quality_preference_t;
+
+typedef enum {
+ CAM_FLASH_CTRL_OFF,
+ CAM_FLASH_CTRL_SINGLE,
+ CAM_FLASH_CTRL_TORCH
+} cam_flash_ctrl_t;
+
+typedef struct {
+ uint8_t frame_dropped; /* This flag indicates whether any stream buffer is dropped or not */
+ cam_stream_ID_t cam_stream_ID; /* if dropped, Stream ID of dropped streams */
+} cam_frame_dropped_t;
+
+typedef struct {
+ uint8_t ae_mode;
+ uint8_t awb_mode;
+ uint8_t af_mode;
+} cam_scene_mode_overrides_t;
+
+typedef struct {
+ int32_t left;
+ int32_t top;
+ int32_t width;
+ int32_t height;
+} cam_crop_region_t;
+
+typedef struct {
+ /* Estimated sharpness for each region of the input image.
+ * Normalized to be between 0 and maxSharpnessMapValue.
+ * Higher values mean sharper (better focused) */
+ int32_t sharpness[CAM_MAX_MAP_WIDTH][CAM_MAX_MAP_HEIGHT];
+} cam_sharpness_map_t;
+
+typedef struct {
+ float lens_shading[4*CAM_MAX_SHADING_MAP_HEIGHT*CAM_MAX_SHADING_MAP_WIDTH];
+} cam_lens_shading_map_t;
+
+typedef struct {
+ int32_t min_value;
+ int32_t max_value;
+ int32_t def_value;
+ int32_t step;
+} cam_control_range_t;
+
+#define CAM_QCOM_FEATURE_NONE (cam_feature_mask_t)0UL
+#define CAM_QCOM_FEATURE_FACE_DETECTION ((cam_feature_mask_t)1UL<<0)
+#define CAM_QCOM_FEATURE_DENOISE2D ((cam_feature_mask_t)1UL<<1)
+#define CAM_QCOM_FEATURE_CROP ((cam_feature_mask_t)1UL<<2)
+#define CAM_QCOM_FEATURE_ROTATION ((cam_feature_mask_t)1UL<<3)
+#define CAM_QCOM_FEATURE_FLIP ((cam_feature_mask_t)1UL<<4)
+#define CAM_QCOM_FEATURE_HDR ((cam_feature_mask_t)1UL<<5)
+#define CAM_QCOM_FEATURE_REGISTER_FACE ((cam_feature_mask_t)1UL<<6)
+#define CAM_QCOM_FEATURE_SHARPNESS ((cam_feature_mask_t)1UL<<7)
+#define CAM_QCOM_FEATURE_VIDEO_HDR ((cam_feature_mask_t)1UL<<8)
+#define CAM_QCOM_FEATURE_CAC ((cam_feature_mask_t)1UL<<9)
+#define CAM_QCOM_FEATURE_SCALE ((cam_feature_mask_t)1UL<<10)
+#define CAM_QCOM_FEATURE_EFFECT ((cam_feature_mask_t)1UL<<11)
+#define CAM_QCOM_FEATURE_UBIFOCUS ((cam_feature_mask_t)1UL<<12)
+#define CAM_QCOM_FEATURE_CHROMA_FLASH ((cam_feature_mask_t)1UL<<13)
+#define CAM_QCOM_FEATURE_OPTIZOOM ((cam_feature_mask_t)1UL<<14)
+#define CAM_QCOM_FEATURE_SENSOR_HDR ((cam_feature_mask_t)1UL<<15)
+#define CAM_QCOM_FEATURE_REFOCUS ((cam_feature_mask_t)1UL<<16)
+#define CAM_QCOM_FEATURE_CPP_TNR ((cam_feature_mask_t)1UL<<17)
+#define CAM_QCOM_FEATURE_RAW_PROCESSING ((cam_feature_mask_t)1UL<<18)
+#define CAM_QCOM_FEATURE_TRUEPORTRAIT ((cam_feature_mask_t)1UL<<19)
+#define CAM_QCOM_FEATURE_LLVD ((cam_feature_mask_t)1UL<<20)
+#define CAM_QCOM_FEATURE_DIS20 ((cam_feature_mask_t)1UL<<21)
+#define CAM_QCOM_FEATURE_STILLMORE ((cam_feature_mask_t)1UL<<22)
+#define CAM_QCOM_FEATURE_DCRF ((cam_feature_mask_t)1UL<<23)
+#define CAM_QCOM_FEATURE_CDS ((cam_feature_mask_t)1UL<<24)
+#define CAM_QCOM_FEATURE_EZTUNE ((cam_feature_mask_t)1UL<<25)
+#define CAM_QCOM_FEATURE_DSDN ((cam_feature_mask_t)1UL<<26) //Special CDS in CPP block
+#define CAM_QCOM_FEATURE_SW2D ((cam_feature_mask_t)1UL<<27)
+#define CAM_OEM_FEATURE_1 ((cam_feature_mask_t)1UL<<28)
+#define CAM_OEM_FEATURE_2 ((cam_feature_mask_t)1UL<<29)
+#define CAM_QTI_FEATURE_SW_TNR ((cam_feature_mask_t)1UL<<30)
+#define CAM_QCOM_FEATURE_METADATA_PROCESSING ((cam_feature_mask_t)1UL<<31)
+#define CAM_QCOM_FEATURE_PAAF (((cam_feature_mask_t)1UL)<<32)
+#define CAM_QCOM_FEATURE_QUADRA_CFA (((cam_feature_mask_t)1UL)<<33)
+#define CAM_QCOM_FEATURE_PP_SUPERSET (CAM_QCOM_FEATURE_DENOISE2D|CAM_QCOM_FEATURE_CROP|\
+ CAM_QCOM_FEATURE_ROTATION|CAM_QCOM_FEATURE_SHARPNESS|\
+ CAM_QCOM_FEATURE_SCALE|CAM_QCOM_FEATURE_CAC|\
+ CAM_QCOM_FEATURE_EZTUNE|CAM_QCOM_FEATURE_CPP_TNR|\
+ CAM_QCOM_FEATURE_LLVD|CAM_QCOM_FEATURE_QUADRA_CFA)
+
+#define CAM_QCOM_FEATURE_PP_PASS_1 CAM_QCOM_FEATURE_PP_SUPERSET
+#define CAM_QCOM_FEATURE_PP_PASS_2 (CAM_QCOM_FEATURE_SCALE | CAM_QCOM_FEATURE_CROP)
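
These feature bits are plain flags in a cam_feature_mask_t (a wide integer type
defined elsewhere in these headers), so a pass configuration is tested and adjusted
with ordinary bitwise operations; a small illustrative sketch:

    cam_feature_mask_t pp_mask = CAM_QCOM_FEATURE_PP_PASS_1;

    if (pp_mask & CAM_QCOM_FEATURE_CAC) {
        /* chromatic aberration correction is requested for this pass */
    }
    pp_mask |= CAM_QCOM_FEATURE_FLIP;       /* add another post-processing feature */
    pp_mask &= ~CAM_QCOM_FEATURE_ROTATION;  /* drop one */
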
+
+// Counter-clockwise
+typedef enum {
+ ROTATE_0 = 1<<0,
+ ROTATE_90 = 1<<1,
+ ROTATE_180 = 1<<2,
+ ROTATE_270 = 1<<3,
+} cam_rotation_t;
+
+typedef struct {
+ cam_rotation_t rotation; /* jpeg rotation */
+ cam_rotation_t device_rotation; /* device rotation */
+ uint32_t streamId;
+} cam_rotation_info_t;
+
+typedef enum {
+ FLIP_NONE = 0, /* 00b */
+ FLIP_H = 1, /* 01b */
+ FLIP_V = 2, /* 10b */
+ FLIP_V_H = 3, /* 11b */
+} cam_flip_t;
+
+typedef struct {
+ uint32_t bundle_id; /* bundle id */
+ uint8_t num_of_streams; /* number of streams in the bundle */
+ uint32_t stream_ids[MAX_STREAM_NUM_IN_BUNDLE]; /* array of stream ids to be bundled */
+} cam_bundle_config_t;
+
+typedef enum {
+ CAM_ONLINE_REPROCESS_TYPE, /* online reprocess, frames from running streams */
+ CAM_OFFLINE_REPROCESS_TYPE, /* offline reprocess, frames from external source */
+} cam_reprocess_type_enum_t;
+
+typedef struct {
+ uint8_t burst_count;
+ uint8_t min_burst_count;
+ uint8_t max_burst_count;
+} cam_still_more_t;
+
+typedef struct {
+ uint8_t burst_count;
+ uint8_t output_count;
+ uint8_t flash_bracketing[CAM_MAX_FLASH_BRACKETING];
+ uint8_t metadata_index;
+} cam_chroma_flash_t;
+
+typedef enum {
+ CAM_HDR_MODE_SINGLEFRAME, /* Single frame HDR mode which does only tone mapping */
+ CAM_HDR_MODE_MULTIFRAME, /* Multi frame HDR mode which needs two frames with 0.5x and 2x exposure respectively */
+} cam_hdr_mode_enum_t;
+
+typedef struct {
+ uint32_t hdr_enable;
+ uint32_t hdr_need_1x; /* when CAM_QCOM_FEATURE_HDR enabled, indicate if 1x is needed for output */
+ cam_hdr_mode_enum_t hdr_mode;
+} cam_hdr_param_t;
+
+typedef struct {
+ int32_t output_width;
+ int32_t output_height;
+} cam_scale_param_t;
+
+typedef struct {
+ uint8_t enable;
+ uint8_t burst_count;
+ uint8_t focus_steps[MAX_AF_BRACKETING_VALUES];
+ uint8_t output_count;
+ uint32_t meta_max_size;
+} cam_af_bracketing_t;
+
+typedef struct {
+ uint8_t enable;
+ uint8_t burst_count;
+} cam_flash_bracketing_t;
+
+typedef struct {
+ uint8_t enable;
+ uint8_t burst_count;
+ uint8_t zoom_threshold;
+} cam_opti_zoom_t;
+
+typedef struct {
+ size_t meta_max_size;
+} cam_true_portrait_t;
+
+typedef enum {
+ CAM_FLASH_OFF,
+ CAM_FLASH_ON
+} cam_flash_value_t;
+
+typedef struct {
+ cam_sensor_t sens_type;
+ cam_format_t native_format;
+} cam_sensor_type_t;
+
+typedef struct {
+ uint32_t result;
+ uint32_t header_size;
+ uint32_t width;
+ uint32_t height;
+ uint8_t data[0];
+} cam_misc_buf_t;
+
+typedef struct {
+ uint32_t misc_buffer_index;
+} cam_misc_buf_param_t;
+
+typedef struct {
+ /* reprocess feature mask */
+ cam_feature_mask_t feature_mask;
+
+ /* individual setting for features to be reprocessed */
+ cam_denoise_param_t denoise2d;
+ cam_rect_t input_crop;
+ cam_rotation_t rotation;
+ uint32_t flip;
+ int32_t sharpness;
+ int32_t effect;
+ cam_hdr_param_t hdr_param;
+ cam_scale_param_t scale_param;
+
+ uint8_t zoom_level;
+ cam_flash_value_t flash_value;
+ cam_misc_buf_param_t misc_buf_param;
+ uint32_t burst_cnt;
+ uint8_t cur_reproc_count;
+ uint8_t total_reproc_count;
+} cam_pp_feature_config_t;
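
A hedged sketch of filling this configuration for a reprocess pass that rotates and
crops (the cam_rect_t member names are taken from the rectangle type used elsewhere
in cam_types.h; the numeric values are purely illustrative):

    cam_pp_feature_config_t cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.feature_mask = CAM_QCOM_FEATURE_ROTATION | CAM_QCOM_FEATURE_CROP;
    cfg.rotation     = ROTATE_90;
    cfg.input_crop.left   = 0;      /* assumed cam_rect_t layout: left/top/width/height */
    cfg.input_crop.top    = 0;
    cfg.input_crop.width  = 1920;
    cfg.input_crop.height = 1080;
    cfg.cur_reproc_count   = 0;
    cfg.total_reproc_count = 1;     /* single reprocess pass */
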
+
+typedef struct {
+ uint32_t input_stream_id;
+ /* input source stream type */
+ cam_stream_type_t input_stream_type;
+} cam_pp_online_src_config_t;
+
+typedef struct {
+ /* image format */
+ cam_format_t input_fmt;
+
+ /* image dimension */
+ cam_dimension_t input_dim;
+
+    /* buffer plane information, will be calculated based on stream_type, fmt,
+ dim, and padding_info(from stream config). Info including:
+ offset_x, offset_y, stride, scanline, plane offset */
+ cam_stream_buf_plane_info_t input_buf_planes;
+
+ /* number of input reprocess buffers */
+ uint8_t num_of_bufs;
+
+ /* input source type */
+ cam_stream_type_t input_type;
+
+} cam_pp_offline_src_config_t;
+
+/* reprocess stream input configuration */
+typedef struct {
+ /* input source config */
+ cam_reprocess_type_enum_t pp_type;
+ union {
+ cam_pp_online_src_config_t online;
+ cam_pp_offline_src_config_t offline;
+ };
+
+ /* pp feature config */
+ cam_pp_feature_config_t pp_feature_config;
+} cam_stream_reproc_config_t;
+
+typedef struct {
+ uint8_t crop_enabled;
+ cam_rect_t input_crop;
+} cam_crop_param_t;
+
+typedef struct {
+ uint8_t trigger;
+ int32_t trigger_id;
+} cam_trigger_t;
+
+typedef struct {
+ cam_denoise_param_t denoise2d;
+ cam_crop_param_t crop;
+ uint32_t flip; /* 0 means no flip */
+ uint32_t uv_upsample; /* 0 means no chroma upsampling */
+ int32_t sharpness; /* 0 means no sharpness */
+ int32_t effect;
+ cam_rotation_t rotation;
+ cam_rotation_t device_rotation;
+} cam_per_frame_pp_config_t;
+
+typedef enum {
+ CAM_OPT_STAB_OFF,
+ CAM_OPT_STAB_ON,
+ CAM_OPT_STAB_MAX
+} cam_optical_stab_modes_t;
+
+typedef enum {
+ CAM_FILTER_ARRANGEMENT_RGGB,
+ CAM_FILTER_ARRANGEMENT_GRBG,
+ CAM_FILTER_ARRANGEMENT_GBRG,
+ CAM_FILTER_ARRANGEMENT_BGGR,
+
+ /* Sensor is not Bayer; output has 3 16-bit values for each pixel,
+ * instead of just 1 16-bit value per pixel.*/
+ CAM_FILTER_ARRANGEMENT_RGB,
+    /* Sensor is YUV; SW does not have access to the actual RAW,
+ * output is interleaved UYVY */
+ CAM_FILTER_ARRANGEMENT_UYVY,
+ CAM_FILTER_ARRANGEMENT_YUYV,
+ CAM_FILTER_ARRANGEMENT_Y
+} cam_color_filter_arrangement_t;
+
+typedef enum {
+ CAM_AF_LENS_STATE_STATIONARY,
+ CAM_AF_LENS_STATE_MOVING,
+} cam_af_lens_state_t;
+
+typedef enum {
+ CAM_AWB_STATE_INACTIVE,
+ CAM_AWB_STATE_SEARCHING,
+ CAM_AWB_STATE_CONVERGED,
+ CAM_AWB_STATE_LOCKED
+} cam_awb_state_t;
+
+typedef enum {
+ CAM_FOCUS_UNCALIBRATED,
+ CAM_FOCUS_APPROXIMATE,
+ CAM_FOCUS_CALIBRATED
+} cam_focus_calibration_t;
+
+typedef enum {
+ CAM_TEST_PATTERN_OFF,
+ CAM_TEST_PATTERN_SOLID_COLOR,
+ CAM_TEST_PATTERN_COLOR_BARS,
+ CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY,
+ CAM_TEST_PATTERN_PN9,
+ CAM_TEST_PATTERN_CUSTOM1 = 256
+} cam_test_pattern_mode_t;
+
+typedef struct {
+ cam_test_pattern_mode_t mode;
+ int32_t r;
+ int32_t gr;
+ int32_t gb;
+ int32_t b;
+} cam_test_pattern_data_t;
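
For example, a solid-color test pattern would be selected roughly as follows (the
four values are assumed to be the per-Bayer-channel values of the solid color, in
line with the usual sensor test-pattern convention):

    cam_test_pattern_data_t tp;
    tp.mode = CAM_TEST_PATTERN_SOLID_COLOR;
    tp.r = tp.gr = tp.gb = tp.b = 0;   /* solid black frame */
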
+
+typedef enum {
+ CAM_AWB_D50,
+ CAM_AWB_D65,
+ CAM_AWB_D75,
+ CAM_AWB_A,
+ CAM_AWB_CUSTOM_A,
+ CAM_AWB_WARM_FLO,
+ CAM_AWB_COLD_FLO,
+ CAM_AWB_CUSTOM_FLO,
+ CAM_AWB_NOON,
+ CAM_AWB_CUSTOM_DAYLIGHT,
+ CAM_AWB_INVALID_ALL_LIGHT,
+} cam_illuminat_t;
+
+typedef enum {
+ LEGACY_RAW,
+ MIPI_RAW,
+} cam_opaque_raw_format_t;
+
+typedef enum {
+ CAM_PERF_NORMAL = 0,
+ CAM_PERF_HIGH,
+ CAM_PERF_HIGH_PERFORMANCE,
+} cam_perf_mode_t;
+
+typedef struct {
+ float real_gain;
+ float lux_idx;
+ float exp_time;
+} cam_intf_aec_t;
+
+#define CAM_INTF_AEC_DATA_MAX (10)
+
+typedef struct {
+ uint32_t frame_count;
+ cam_intf_aec_t aec_data[CAM_INTF_AEC_DATA_MAX];
+} cam_intf_meta_imglib_input_aec_t;
+
+typedef struct {
+ cam_intf_meta_imglib_input_aec_t meta_imglib_input_aec;
+} cam_intf_meta_imglib_t;
+
+typedef struct {
+ uint8_t previewOnly;
+ uint64_t value;
+} cam_intf_parm_manual_3a_t;
+
+typedef enum {
+ CAM_MANUAL_CAPTURE_TYPE_OFF, /*Manual capture mode disabled*/
+ CAM_MANUAL_CAPTURE_TYPE_1, /*Normal ZSL capture with limited 3A settings*/
+ CAM_MANUAL_CAPTURE_TYPE_2, /*Low light capture mode */
+ CAM_MANUAL_CAPTURE_TYPE_3, /*Offline RAW processing */
+ CAM_MANUAL_CAPTURE_TYPE_4 /*Offline RAW processing with multiple RAW*/
+} cam_manual_capture_type;
+
+typedef enum {
+ CAM_ANALYSIS_INFO_FD_STILL, /*Analysis requirements for STILL PREVIEW*/
+ CAM_ANALYSIS_INFO_FD_VIDEO, /*Analysis requirements for VIDEO*/
+ CAM_ANALYSIS_INFO_PAAF, /*Analysis requirements for PAAF*/
+ CAM_ANALYSIS_INFO_MAX, /*Max number*/
+} cam_analysis_info_type;
+
+typedef struct {
+ /* Whether the information here is valid or not */
+ uint8_t valid;
+
+ /* Whether analysis supported by hw */
+ uint8_t hw_analysis_supported;
+
+ /* Analysis stream max supported size */
+ cam_dimension_t analysis_max_res;
+
+ /* Analysis stream padding info */
+ cam_padding_info_t analysis_padding_info;
+
+ /* Analysis format */
+ cam_format_t analysis_format;
+
+ /* Analysis recommended size */
+ cam_dimension_t analysis_recommended_res;
+} cam_analysis_info_t;
+
+typedef struct {
+ /* Information for DDM metadata*/
+ cam_stream_crop_info_t sensor_crop_info; /* sensor crop info */
+ cam_stream_crop_info_t camif_crop_info; /* CAMIF crop info */
+ cam_stream_crop_info_t isp_crop_info; /* ISP crop info */
+ cam_stream_crop_info_t cpp_crop_info; /* CPP crop info */
+ cam_focal_length_ratio_t af_focal_length_ratio; /* AF focal length ratio */
+ int32_t pipeline_flip; /* current pipeline flip and rotational parameters */
+ cam_rotation_info_t rotation_info; /* rotation information */
+ cam_area_t af_roi; /* AF roi info */
+ /* Information for CPP reprocess */
+ cam_dyn_img_data_t dyn_mask; /* Post processing dynamic feature mask */
+} cam_reprocess_info_t;
+
+/***********************************
+* ENUM definition for custom parameter type
+************************************/
+typedef enum {
+ CAM_CUSTOM_PARM_EXAMPLE,
+ CAM_CUSTOM_PARM_MAX,
+} cam_custom_parm_type;
+
+#endif /* __QCAMERA_TYPES_H__ */
diff --git a/camera/QCamera2/stack/common/mm_camera_interface.h b/camera/QCamera2/stack/common/mm_camera_interface.h
new file mode 100644
index 0000000..40bd6d3
--- /dev/null
+++ b/camera/QCamera2/stack/common/mm_camera_interface.h
@@ -0,0 +1,923 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_INTERFACE_H__
+#define __MM_CAMERA_INTERFACE_H__
+
+// System dependencies
+#include <media/msmb_camera.h>
+
+// Camera dependencies
+#include "cam_intf.h"
+#include "cam_queue.h"
+
+#define MM_CAMERA_MAX_NUM_SENSORS MSM_MAX_CAMERA_SENSORS
+#define MM_CAMERA_MAX_NUM_FRAMES CAM_MAX_NUM_BUFS_PER_STREAM
+/* num of channels allowed in a camera obj */
+#define MM_CAMERA_CHANNEL_MAX 16
+
+#define PAD_TO_SIZE(size, padding) \
+ ((size + (typeof(size))(padding - 1)) & \
+ (typeof(size))(~(padding - 1)))
+
+#define CEIL_DIVISION(n, d) ((n+d-1)/d)
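
For a feel of what these helpers evaluate to (illustrative values only; PAD_TO_SIZE
assumes the padding is a power of two):

    /* pad a 1000-pixel luma stride to a 64-byte boundary */
    uint32_t stride = PAD_TO_SIZE((uint32_t)1000, (uint32_t)64);  /* -> 1024 */
    /* split 1000 pixels into 16-pixel units, rounding up */
    uint32_t units  = CEIL_DIVISION(1000u, 16u);                  /* -> 63 */
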
+
+/** CAM_DUMP_TO_FILE:
+ * @filename: file name
+ * @name:filename
+ * @index: index of the file
+ * @extn: file extension
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file
+ **/
+#define CAM_DUMP_TO_FILE(path, name, index, extn, p_addr, len) ({ \
+ size_t rc = 0; \
+ char filename[FILENAME_MAX]; \
+ if (index >= 0) \
+ snprintf(filename, FILENAME_MAX, "%s/%s%d.%s", path, name, index, extn); \
+ else \
+ snprintf(filename, FILENAME_MAX, "%s/%s.%s", path, name, extn); \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr, 1, len, fp); \
+ LOGE("written size %d", len); \
+ fclose(fp); \
+ } else { \
+ LOGE("open %s failed", filename); \
+ } \
+})
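
An illustrative call (the dump path is only an example location, and 'frame' stands
for a pointer to the mm_camera_buf_def_t defined further below):

    CAM_DUMP_TO_FILE("/data/misc/camera", "preview", (int)frame->frame_idx, "yuv",
                     frame->buffer, frame->frame_len);
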
+
+/* Declaring Buffer structure */
+struct mm_camera_buf_def;
+
+/** mm_camera_plane_def_t : structure for frame plane info
+* @num_planes : num of planes for the frame buffer, to be
+* filled during mem allocation
+* @planes : plane info for the frame buffer, to be filled
+* during mem allocation
+**/
+typedef struct {
+ int8_t num_planes;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+} mm_camera_plane_buf_def_t;
+
+/** mm_camera_user_buf_def_t : structure for frame plane info
+* @num_buffers : num of buffers in this user defined structure
+* @bufs_used : actual number of buffer filled
+* @buf_in_use : flag to notify buffer usage status.
+* @plane_buf : Plane buffer array pointer.
+**/
+typedef struct {
+ uint8_t num_buffers;
+ uint8_t bufs_used; /*Num of Buffer filled by Kernel*/
+ uint8_t buf_in_use; /* Container buffer is freed to fill*/
+ int32_t buf_idx[MSM_CAMERA_MAX_USER_BUFF_CNT];
+ struct mm_camera_buf_def *plane_buf;
+} mm_camera_user_buf_def_t;
+
+/** mm_camera_buf_def_t: structure for stream frame buf
+* @stream_id : stream handler to uniquely identify a stream
+* object
+* @buf_idx : index of the buf within the stream bufs, to be
+* filled during mem allocation
+* @timespec_ts : time stamp, to be filled when DQBUF is
+* called
+* @frame_idx : frame sequence num, to be filled when DQBUF
+* @plane_buf : Frame plane definition
+* @fd : file descriptor of the frame buffer, to be filled
+* during mem allocation
+* @buffer : pointer to the frame buffer, to be filled during
+* mem allocation
+* @frame_len : length of the whole frame, to be filled during
+* mem allocation
+* @mem_info : user specific pointer to additional mem info
+* @flags: v4l2_buffer flags, used to report error in data buffers
+**/
+typedef struct mm_camera_buf_def {
+ uint32_t stream_id;
+ cam_stream_type_t stream_type;
+ cam_stream_buf_type buf_type;
+ uint32_t buf_idx;
+ uint8_t is_uv_subsampled;
+ struct timespec ts;
+ uint32_t frame_idx;
+ union {
+ mm_camera_plane_buf_def_t planes_buf;
+ mm_camera_user_buf_def_t user_buf;
+ };
+ int fd;
+ void *buffer;
+ size_t frame_len;
+ void *mem_info;
+ uint32_t flags;
+} mm_camera_buf_def_t;
+
+/** mm_camera_super_buf_t: super buf structure for bundled
+* stream frames
+* @camera_handle : camera handler to uniquely identify
+* a camera object
+* @ch_id : channel handler to uniquely identify a channel
+* object
+* @num_bufs : number of buffers in the super buf, should not
+* exceed MAX_STREAM_NUM_IN_BUNDLE
+* @bufs : array of buffers in the bundle
+**/
+typedef struct {
+ uint32_t camera_handle;
+ uint32_t ch_id;
+ uint32_t num_bufs;
+ uint8_t bUnlockAEC;
+ uint8_t bReadyForPrepareSnapshot;
+ mm_camera_buf_def_t* bufs[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_super_buf_t;
+
+/** mm_camera_req_buf_type_t
+* Request type for super buf from channel
+**/
+typedef enum {
+ MM_CAMERA_REQ_SUPER_BUF,
+ MM_CAMERA_REQ_FRAME_SYNC_BUF
+} mm_camera_req_buf_type_t;
+
+/** mm_camera_req_buf_t: Attributes for super buf request
+*
+* @type : type of super buf requested
+* @num_buf_requested : num of super bufs requested
+* @num_retro_buf_requested : number of retro bufs requested
+* @primary_only : specifies if only primary camera frame for a dual
+* camera is requested
+**/
+typedef struct {
+ mm_camera_req_buf_type_t type;
+ uint32_t num_buf_requested;
+ uint32_t num_retro_buf_requested;
+ uint8_t primary_only;
+} mm_camera_req_buf_t;
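
A minimal sketch of requesting one bundled frame set in burst mode (the filled
structure is later handed to the request_super_buf() entry of mm_camera_ops_t,
defined further below):

    mm_camera_req_buf_t req;
    memset(&req, 0, sizeof(req));
    req.type              = MM_CAMERA_REQ_SUPER_BUF;
    req.num_buf_requested = 1;   /* one super buf */
    req.primary_only      = 0;   /* frames from both sensors, if dual camera */
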
+
+/** mm_camera_event_t: structure for event
+* @server_event_type : event type from server
+* @status : status of an event, value could be
+* CAM_STATUS_SUCCESS
+* CAM_STATUS_FAILED
+**/
+typedef struct {
+ cam_event_type_t server_event_type;
+ uint32_t status;
+} mm_camera_event_t;
+
+/** mm_camera_event_notify_t: function definition for event
+* notify handling
+* @camera_handle : camera handler
+* @evt : pointer to an event struct
+* @user_data: user data pointer
+**/
+typedef void (*mm_camera_event_notify_t)(uint32_t camera_handle,
+ mm_camera_event_t *evt,
+ void *user_data);
+
+/** mm_camera_buf_notify_t: function definition for frame notify
+* handling
+* @mm_camera_super_buf_t : received frame buffers
+* @user_data: user data pointer
+**/
+typedef void (*mm_camera_buf_notify_t) (mm_camera_super_buf_t *bufs,
+ void *user_data);
+
+/** map_stream_buf_op_t: function definition for operation of
+* mapping stream buffers via domain socket
+* @frame_idx : buffer index within stream buffers
+* @plane_idx : plane index. If all planes share the same
+* fd, plane_idx = -1; otherwise, plane_idx is
+* the index to plane (0..num_of_planes)
+* @fd : file descriptor of the stream buffer
+* @size: size of the stream buffer
+* @userdata : user data pointer
+**/
+typedef int32_t (*map_stream_buf_op_t) (uint32_t frame_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size,
+ cam_mapping_buf_type type,
+ void *userdata);
+
+typedef int32_t (*map_stream_bufs_op_t) (const cam_buf_map_type_list *buf_map_list,
+ void *userdata);
+
+/** unmap_stream_buf_op_t: function definition for operation of
+* unmapping stream buffers via domain
+* socket
+* @frame_idx : buffer index within stream buffers
+* @plane_idx : plane index. If all planes share the same
+* fd, plane_idx = -1; otherwise, plane_idx is
+* the index to plane (0..num_of_planes)
+* @userdata : user data pointer
+**/
+typedef int32_t (*unmap_stream_buf_op_t) (uint32_t frame_idx,
+ int32_t plane_idx,
+ cam_mapping_buf_type type,
+ void *userdata);
+
+/** mm_camera_map_unmap_ops_tbl_t: virtual table
+* for mapping/unmapping stream buffers via
+* domain socket
+* @map_ops : operation for mapping
+* @unmap_ops : operation for unmapping
+* @userdata: user data pointer
+**/
+typedef struct {
+ map_stream_buf_op_t map_ops;
+ map_stream_bufs_op_t bundled_map_ops;
+ unmap_stream_buf_op_t unmap_ops;
+ void *userdata;
+} mm_camera_map_unmap_ops_tbl_t;
+
+/** mm_camera_stream_mem_vtbl_t: virtual table for stream
+* memory allocation and deallocation
+* @get_bufs : function definition for allocating
+* stream buffers
+* @put_bufs : function definition for deallocating
+* stream buffers
+* @user_data: user data pointer
+**/
+typedef struct {
+ void *user_data;
+ int32_t (*set_config_ops) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+ int32_t (*get_bufs) (cam_frame_len_offset_t *offset,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+ int32_t (*put_bufs) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+ int32_t (*invalidate_buf)(uint32_t index, void *user_data);
+ int32_t (*clean_invalidate_buf)(uint32_t index, void *user_data);
+} mm_camera_stream_mem_vtbl_t;
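
A skeletal wiring of this table, with the actual buffer allocation elided (the stub
bodies below are placeholders, not a working allocator):

    static int32_t my_get_bufs(cam_frame_len_offset_t *offset, uint8_t *num_bufs,
            uint8_t **initial_reg_flag, mm_camera_buf_def_t **bufs,
            mm_camera_map_unmap_ops_tbl_t *ops_tbl, void *user_data)
    {
        (void)offset; (void)ops_tbl; (void)user_data;
        *num_bufs = 0; *initial_reg_flag = NULL; *bufs = NULL;
        return -1;   /* a real implementation allocates and maps buffers here */
    }

    static int32_t my_put_bufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl, void *user_data)
    {
        (void)ops_tbl; (void)user_data;
        return 0;    /* a real implementation releases the buffers here */
    }

    mm_camera_stream_mem_vtbl_t mem_vtbl = {
        .user_data = NULL,
        .get_bufs  = my_get_bufs,
        .put_bufs  = my_put_bufs,
        /* set_config_ops, invalidate_buf and clean_invalidate_buf left NULL here */
    };
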
+
+/** mm_camera_stream_config_t: structure for stream
+* configuration
+* @stream_info : pointer to a stream info structure
+* @padding_info: padding info obtained from querycapability
+* @mem_tbl : memory operation table for
+* allocating/deallocating stream buffers
+* @stream_cb_sync : SYNC callback handling stream frame notify
+* @stream_cb : ASYNC callback handling stream frame notify
+* @userdata : user data pointer
+**/
+typedef struct {
+ cam_stream_info_t *stream_info;
+ cam_padding_info_t padding_info;
+ mm_camera_stream_mem_vtbl_t mem_vtbl;
+ mm_camera_buf_notify_t stream_cb_sync;
+ mm_camera_buf_notify_t stream_cb;
+ void *userdata;
+} mm_camera_stream_config_t;
+
+/** mm_camera_super_buf_notify_mode_t: enum for super buffer
+* notification mode
+* @MM_CAMERA_SUPER_BUF_NOTIFY_BURST :
+* ZSL use case: get burst of frames
+* @MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS :
+* get continuous frames: when the super buf is ready
+* dispatch it to HAL
+**/
+typedef enum {
+ MM_CAMERA_SUPER_BUF_NOTIFY_BURST = 0,
+ MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS,
+ MM_CAMERA_SUPER_BUF_NOTIFY_MAX
+} mm_camera_super_buf_notify_mode_t;
+
+/** mm_camera_super_buf_priority_t: enum for super buffer
+* matching priority
+* @MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL :
+* Save the frame no matter focused or not. Currently only
+* this type is supported.
+* @MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS :
+* only queue the frame that is focused. Will enable meta
+* data header to carry focus info
+* @MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING :
+* after shutter, only queue matched exposure index
+**/
+typedef enum {
+ MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL = 0,
+ MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS,
+ MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING,
+ MM_CAMERA_SUPER_BUF_PRIORITY_LOW,/* Bundled metadata frame may not match*/
+ MM_CAMERA_SUPER_BUF_PRIORITY_MAX
+} mm_camera_super_buf_priority_t;
+
+/** mm_camera_advanced_capture_t: enum for advanced capture type.
+* @MM_CAMERA_AF_BRACKETING :
+* to enable AF Bracketing.
+* @MM_CAMERA_AE_BRACKETING :
+* to enable AE Bracketing.
+* @MM_CAMERA_FLASH_BRACKETING :
+* to enable Flash Bracketing.
+* @MM_CAMERA_ZOOM_1X :
+* to enable zoom 1x capture request
+**/
+typedef enum {
+ MM_CAMERA_AF_BRACKETING = 0,
+ MM_CAMERA_AE_BRACKETING,
+ MM_CAMERA_FLASH_BRACKETING,
+ MM_CAMERA_ZOOM_1X,
+ MM_CAMERA_FRAME_CAPTURE,
+} mm_camera_advanced_capture_t;
+
+/** mm_camera_stream_cb_type: enum for stream buffer callback type.
+* @MM_CAMERA_STREAM_CB_TYPE_ASYNC :
+* callback is async type. Buffer processing is done in the client thread context
+* @MM_CAMERA_STREAM_CB_TYPE_SYNC :
+* callback is sync type. Buffer processing is done in the interface thread context
+**/
+typedef enum {
+ MM_CAMERA_STREAM_CB_TYPE_ASYNC,
+ MM_CAMERA_STREAM_CB_TYPE_SYNC,
+} mm_camera_stream_cb_type;
+
+
+/** mm_camera_channel_attr_t: structure for defining channel
+* attributes
+* @notify_mode : notify mode: burst or continuous
+* @water_mark : queue depth. Only valid for burst mode
+* @look_back : look back how many frames from last buf.
+* Only valid for burst mode
+* @post_frame_skip : after sending the first frame to HAL, how many
+* frames need to be skipped before the next
+* delivery. Only valid for burst mode
+* @max_unmatched_frames : max number of unmatched frames in
+* queue
+* @enable_frame_sync: Enables frame sync for dual camera
+* @priority : save matched priority frames only
+* @user_expected_frame_id : Number of frames the camera interface
+* will wait for when getting the instant capture frame.
+**/
+typedef struct {
+ mm_camera_super_buf_notify_mode_t notify_mode;
+ uint8_t water_mark;
+ uint8_t look_back;
+ uint8_t post_frame_skip;
+ uint8_t max_unmatched_frames;
+ uint8_t enable_frame_sync;
+ mm_camera_super_buf_priority_t priority;
+ uint8_t user_expected_frame_id;
+} mm_camera_channel_attr_t;
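
Illustrative attribute values for a ZSL-style burst channel (the numbers are
examples, not recommendations):

    mm_camera_channel_attr_t attr;
    memset(&attr, 0, sizeof(attr));
    attr.notify_mode          = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
    attr.water_mark           = 2;   /* keep two matched super bufs queued */
    attr.look_back            = 2;   /* may pick a frame up to two back */
    attr.post_frame_skip      = 1;
    attr.max_unmatched_frames = 3;
    attr.priority             = MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL;
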
+
+typedef struct {
+    /** query_capability: function definition for querying static
+     *  camera capabilities
+     *  @camera_handle : camera handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: would assume cam_capability_t is already mapped
+ **/
+ int32_t (*query_capability) (uint32_t camera_handle);
+
+    /** register_event_notify: function definition for registering
+     *  for event notification
+     *  @camera_handle : camera handler
+     *  @evt_cb : callback for event notify
+     *  @user_data : user data pointer
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*register_event_notify) (uint32_t camera_handle,
+ mm_camera_event_notify_t evt_cb,
+ void *user_data);
+
+    /** close_camera: function definition for closing a camera
+     *  @camera_handle : camera handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*close_camera) (uint32_t camera_handle);
+
+    /** map_buf: function definition for mapping a camera buffer
+     *  via domain socket
+     *  @camera_handle : camera handler
+ * @buf_type : type of mapping buffers, can be value of
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * @fd : file descriptor of the stream buffer
+ * @size : size of the stream buffer
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*map_buf) (uint32_t camera_handle,
+ uint8_t buf_type,
+ int fd,
+ size_t size);
+
+ /** map_bufs: function definition for mapping multiple camera buffers
+ * via domain socket
+ * @camera_handle : camera handler
+ * @buf_map_list : list of buffers to map
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*map_bufs) (uint32_t camera_handle,
+ const cam_buf_map_type_list *buf_map_list);
+
+    /** unmap_buf: function definition for unmapping a camera buffer
+     *  via domain socket
+     *  @camera_handle : camera handler
+ * @buf_type : type of mapping buffers, can be value of
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*unmap_buf) (uint32_t camera_handle,
+ uint8_t buf_type);
+
+    /** set_parms: function definition for setting camera
+     *  based parameters to server
+     *  @camera_handle : camera handler
+ * @parms : batch for parameters to be set, stored in
+ * parm_buffer_t
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: would assume parm_buffer_t is already mapped, and
+ * according parameter entries to be set are filled in the
+ * buf before this call
+ **/
+ int32_t (*set_parms) (uint32_t camera_handle,
+ parm_buffer_t *parms);
+
+    /** get_parms: function definition for querying camera
+     *  based parameters from server
+     *  @camera_handle : camera handler
+ * @parms : batch for parameters to be queried, stored in
+ * parm_buffer_t
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: would assume parm_buffer_t is already mapped, and
+ * according parameter entries to be queried are filled in
+ * the buf before this call
+ **/
+ int32_t (*get_parms) (uint32_t camera_handle,
+ parm_buffer_t *parms);
+
+    /** do_auto_focus: function definition for performing auto focus
+     *  @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: if this call succeeds, we will always assume there will
+ * be an auto_focus event following up.
+ **/
+ int32_t (*do_auto_focus) (uint32_t camera_handle);
+
+    /** cancel_auto_focus: function definition for cancelling
+     *  previous auto focus request
+     *  @camera_handle : camera handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*cancel_auto_focus) (uint32_t camera_handle);
+
+    /** prepare_snapshot: function definition for preparing hardware
+     *  for snapshot.
+     *  @camera_handle : camera handler
+ * @do_af_flag : flag indicating if AF needs to be done
+ * 0 -- no AF needed
+ * 1 -- AF needed
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*prepare_snapshot) (uint32_t camera_handle,
+ int32_t do_af_flag);
+
+ /** start_zsl_snapshot: function definition for starting
+ * zsl snapshot.
+     *  @camera_handle : camera handler
+ * @ch_id : channel id
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*start_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+ /** stop_zsl_snapshot: function definition for stopping
+ * zsl snapshot.
+     *  @camera_handle : camera handler
+ * @ch_id : channel id
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*stop_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+    /** add_channel: function definition for adding a channel
+     *  @camera_handle : camera handler
+     *  @ch_id : channel handler
+     *  @attr : pointer to channel attribute structure
+     *  @channel_cb : callback to handle bundled super buffer
+ * @userdata : user data pointer
+ * Return value: channel id, zero is invalid ch_id
+ * Note: attr, channel_cb, and userdata can be NULL if no
+ * superbufCB is needed
+ **/
+ uint32_t (*add_channel) (uint32_t camera_handle,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+
+    /** delete_channel: function definition for deleting a channel
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*delete_channel) (uint32_t camera_handle,
+ uint32_t ch_id);
+
+ /** get_bundle_info: function definition for querying bundle
+ * info of the channel
+ * @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @bundle_info : bundle info to be filled in
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*get_bundle_info) (uint32_t camera_handle,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info);
+
+    /** add_stream: function definition for adding a stream
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * Return value: stream_id. zero is invalid stream_id
+ **/
+ uint32_t (*add_stream) (uint32_t camera_handle,
+ uint32_t ch_id);
+
+    /** delete_stream: function definition for deleting a stream
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*delete_stream) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id);
+
+ /** link_stream: function definition for linking a stream
+ * @camera_handle : camera handle
+ * @ch_id : channel handle from which the stream originates
+ * @stream_id : stream handle
+ * @linked_ch_id: channel handle in which the stream will be linked
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*link_stream) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id);
+
+    /** config_stream: function definition for configuring a stream
+     *  @camera_handle : camera handler
+     *  @ch_id : channel handler
+     *  @stream_id : stream handler
+     *  @config : pointer to a stream configuration structure
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*config_stream) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+
+    /** map_stream_buf: function definition for mapping
+     *  stream buffer via domain socket
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * @buf_type : type of mapping buffers, can be value of
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : buffer index within the stream buffers
+ * @plane_idx : plane index. If all planes share the same fd,
+     *  plane_idx = -1; otherwise, plane_idx is the
+ * index to plane (0..num_of_planes)
+ * @fd : file descriptor of the stream buffer
+ * @size : size of the stream buffer
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*map_stream_buf) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size);
+
+ /** map_stream_bufs: function definition for mapping multiple
+ * stream buffers via domain socket
+ * @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @buf_map_list : list of buffers to map
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*map_stream_bufs) (uint32_t camera_handle,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list);
+
+    /** unmap_stream_buf: function definition for unmapping
+     *  stream buffer via domain socket
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * @buf_type : type of mapping buffers, can be value of
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : buffer index within the stream buffers
+ * @plane_idx : plane index. If all planes share the same fd,
+     *  plane_idx = -1; otherwise, plane_idx is the
+ * index to plane (0..num_of_planes)
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*unmap_stream_buf) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx);
+
+    /** set_stream_parms: function definition for setting stream
+     *  specific parameters to server
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * @parms : batch for parameters to be set
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: would assume parm buffer is already mapped, and
+ * according parameter entries to be set are filled in the
+ * buf before this call
+ **/
+ int32_t (*set_stream_parms) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms);
+
+    /** get_stream_parms: function definition for querying stream
+     *  specific parameters from server
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * @parms : batch for parameters to be queried
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: would assume parm buffer is already mapped, and
+ * according parameter entries to be queried are filled in
+ * the buf before this call
+ **/
+ int32_t (*get_stream_parms) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms);
+
+    /** start_channel: function definition for starting a channel
+     *  @camera_handle : camera handler
+     *  @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  This call will start all streams belonging to the channel
+ **/
+ int32_t (*start_channel) (uint32_t camera_handle,
+ uint32_t ch_id);
+
+    /** stop_channel: function definition for stopping a channel
+     *  @camera_handle : camera handler
+     *  @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  This call will stop all streams belonging to the channel
+ **/
+ int32_t (*stop_channel) (uint32_t camera_handle,
+ uint32_t ch_id);
+
+    /** qbuf: function definition for queuing a frame buffer back to
+     *  kernel for reuse
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @buf : a frame buffer to be queued back to kernel
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*qbuf) (uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf);
+
+    /** get_queued_buf_count: function definition for querying queued buf count
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * Return value: queued buf count
+ **/
+ int32_t (*get_queued_buf_count) (uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id);
+
+    /** request_super_buf: function definition for requesting frames
+     *  from superbuf queue in burst mode
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @buf : provides info related to the super buf request
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*request_super_buf) (uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_req_buf_t *buf);
+
+    /** cancel_super_buf_request: function definition for canceling
+     *  frames dispatched from superbuf queue in
+     *  burst mode
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*cancel_super_buf_request) (uint32_t camera_handle,
+ uint32_t ch_id);
+
+ /** flush_super_buf_queue: function definition for flushing out
+ * all frames in the superbuf queue up to frame_idx,
+ * even if frames with frame_idx come in later than
+ * this call.
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @frame_idx : frame index up until which all superbufs are flushed
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*flush_super_buf_queue) (uint32_t camera_handle,
+ uint32_t ch_id, uint32_t frame_idx);
+
+ /** configure_notify_mode: function definition for configuring the
+ * notification mode of channel
+ * @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @notify_mode : notification mode
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*configure_notify_mode) (uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode);
+
+ /** process_advanced_capture: function definition for start/stop advanced capture
+ * for snapshot.
+ * @camera_handle : camera handle
+ * @ch_id : channel handler
+ * @type : advanced capture type.
+ * @trigger : flag indicating if advanced capture needs to be done
+ * 0 -- stop advanced capture
+ * 1 -- start advanced capture
+     *  @in_value: Input value / configuration
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*process_advanced_capture) (uint32_t camera_handle,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ int8_t start_flag, void *in_value);
+
+ /** get_session_id: gets the backend session id from the kernel
+ * @camera_handle : camera handle
+ * @sessionid : session id to be retrieved
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: if this call succeeds, we will get a valid session id
+ **/
+ int32_t (*get_session_id) (uint32_t camera_handle,
+ uint32_t* sessionid);
+
+ /** sync_related_sensors: sends sync cmd
+ * @camera_handle : camera handle
+ * @related_cam_info : related cam info to be sent to server
+ * Return value: 0 -- success
+ * -1 -- failure
+ * Note: if this call succeeds, we will get linking established in back end
+ **/
+ int32_t (*sync_related_sensors) (uint32_t camera_handle,
+ cam_sync_related_sensors_event_info_t*
+ related_cam_info);
+ /** flush: function definition for flush
+ * @camera_handle: camera handler
+ * Return value: 0 -- success
+ * -1 -- failure
+ **/
+ int32_t (*flush) (uint32_t camera_handle);
+
+    /** register_stream_buf_cb: function definition for registering special stream callbacks
+     *  @camera_handle : camera handler
+ * @ch_id : channel handler
+ * @stream_id : stream handler
+ * @buf_cb : callback function pointer
+ * @cb_type : Callback type SYNC/ASYNC
+ * @userdata : user data pointer
+ * Return value: 0 -- success
+     *                -1 -- failure
+ **/
+ int32_t (*register_stream_buf_cb) (uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata);
+} mm_camera_ops_t;
+
+/** mm_camera_vtbl_t: virtual table for camera operations
+* @camera_handle : camera handler which uniquely identifies a
+* camera object
+* @ops : API call table
+**/
+typedef struct {
+ uint32_t camera_handle;
+ mm_camera_ops_t *ops;
+} mm_camera_vtbl_t;
+
+/* return number of cameras */
+uint8_t get_num_of_cameras();
+
+/* return reference pointer of camera vtbl */
+int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_obj);
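
How these entry points fit together, as a hedged sketch (error handling trimmed;
the capability buffer is assumed to have been mapped beforehand through
cam->ops->map_buf() with CAM_MAPPING_BUF_TYPE_CAPABILITY, as the notes above require):

    #include "mm_camera_interface.h"

    int open_query_close(uint8_t idx)
    {
        mm_camera_vtbl_t *cam = NULL;

        if (camera_open(idx, &cam) != 0 || cam == NULL)
            return -1;                                   /* open failed */

        int32_t rc = cam->ops->query_capability(cam->camera_handle);

        cam->ops->close_camera(cam->camera_handle);
        return rc;
    }
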
+
+/* helper functions */
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_analysis(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+uint32_t mm_stream_calc_lcm (int32_t num1, int32_t num2);
+
+struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType);
+
+uint8_t is_yuv_sensor(uint32_t camera_id);
+
+#endif /*__MM_CAMERA_INTERFACE_H__*/
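A minimal usage sketch of the interface declared above, not part of the patch: it assumes only the declarations shown here and the 0-on-success convention used throughout this interface, and the helper name is hypothetical.

#include "mm_camera_interface.h"

/* hypothetical helper: open camera 0, query the backend session id
 * through the ops table stored in the returned vtbl, then flush */
static int32_t example_open_and_flush(void)
{
    mm_camera_vtbl_t *cam = NULL;
    uint32_t session_id = 0;
    int32_t rc;

    if (get_num_of_cameras() == 0)
        return -1;

    rc = camera_open(0, &cam);
    if ((rc != 0) || (cam == NULL) || (cam->ops == NULL))
        return -1;

    /* every op takes the camera handle stored in the vtbl */
    rc = cam->ops->get_session_id(cam->camera_handle, &session_id);
    if (rc == 0)
        rc = cam->ops->flush(cam->camera_handle);

    return rc;
}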
diff --git a/camera/QCamera2/stack/common/mm_jpeg_interface.h b/camera/QCamera2/stack/common/mm_jpeg_interface.h
new file mode 100644
index 0000000..9f0ac7f
--- /dev/null
+++ b/camera/QCamera2/stack/common/mm_jpeg_interface.h
@@ -0,0 +1,408 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INTERFACE_H_
+#define MM_JPEG_INTERFACE_H_
+
+// System dependencies
+#include <stdbool.h>
+
+// Camera dependencies
+#include "QOMX_JpegExtensions.h"
+#include "cam_intf.h"
+
+#define MM_JPEG_MAX_PLANES 3
+#define MM_JPEG_MAX_BUF CAM_MAX_NUM_BUFS_PER_STREAM
+#define QUANT_SIZE 64
+#define QTABLE_MAX 2
+#define MM_JPEG_MAX_MPO_IMAGES 2
+
+typedef enum {
+ MM_JPEG_FMT_YUV,
+ MM_JPEG_FMT_BITSTREAM
+} mm_jpeg_format_t;
+
+typedef enum {
+ MM_JPEG_TYPE_JPEG,
+ MM_JPEG_TYPE_MPO
+} mm_jpeg_image_type_t;
+
+typedef struct {
+ cam_ae_exif_debug_t ae_debug_params;
+ cam_awb_exif_debug_t awb_debug_params;
+ cam_af_exif_debug_t af_debug_params;
+ cam_asd_exif_debug_t asd_debug_params;
+ cam_stats_buffer_exif_debug_t stats_debug_params;
+ cam_bestats_buffer_exif_debug_t bestats_debug_params;
+ cam_bhist_buffer_exif_debug_t bhist_debug_params;
+ cam_q3a_tuning_info_t q3a_tuning_debug_params;
+ uint8_t ae_debug_params_valid;
+ uint8_t awb_debug_params_valid;
+ uint8_t af_debug_params_valid;
+ uint8_t asd_debug_params_valid;
+ uint8_t stats_debug_params_valid;
+ uint8_t bestats_debug_params_valid;
+ uint8_t bhist_debug_params_valid;
+ uint8_t q3a_tuning_debug_params_valid;
+} mm_jpeg_debug_exif_params_t;
+
+typedef struct {
+ cam_3a_params_t cam_3a_params;
+ uint8_t cam_3a_params_valid;
+ cam_sensor_params_t sensor_params;
+ mm_jpeg_debug_exif_params_t *debug_params;
+} mm_jpeg_exif_params_t;
+
+typedef struct {
+ /* Indicates if it is a single jpeg or part of a multi picture sequence */
+ mm_jpeg_image_type_t type;
+
+ /* Indicates if image is the primary image in a sequence of images.
+ Applicable only to multi picture formats */
+ uint8_t is_primary;
+
+ /* Number of images in the sequence */
+ uint32_t num_of_images;
+
+ /* Flag to indicate if multi picture metadata need to be added to Exif */
+ uint8_t enable_metadata;
+} mm_jpeg_multi_image_t;
+
+typedef struct {
+ uint32_t sequence; /* for jpeg bit streams, assembling is based on sequence. sequence starts from 0 */
+ uint8_t *buf_vaddr; /* ptr to buf */
+ int fd; /* fd of buf */
+ size_t buf_size; /* total size of buf (header + image) */
+ mm_jpeg_format_t format; /* buffer format*/
+ cam_frame_len_offset_t offset; /* offset of all the planes */
+ uint32_t index; /* index used to identify the buffers */
+} mm_jpeg_buf_t;
+
+typedef struct {
+ uint8_t *buf_vaddr; /* ptr to buf */
+ int fd; /* fd of buf */
+ size_t buf_filled_len; /* used for output image. filled by the client */
+} mm_jpeg_output_t;
+
+typedef enum {
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1,
+ MM_JPEG_COLOR_FORMAT_MONOCHROME,
+ MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V2,
+ MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V1,
+ MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V2,
+ MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V1,
+ MM_JPEG_COLOR_FORMAT_MAX
+} mm_jpeg_color_format;
+
+typedef enum {
+ JPEG_JOB_STATUS_DONE = 0,
+ JPEG_JOB_STATUS_ERROR
+} jpeg_job_status_t;
+
+typedef void (*jpeg_encode_callback_t)(jpeg_job_status_t status,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_output,
+ void *userData);
+
+typedef struct {
+ /* src img dimension */
+ cam_dimension_t src_dim;
+
+ /* jpeg output dimension */
+ cam_dimension_t dst_dim;
+
+ /* crop information */
+ cam_rect_t crop;
+} mm_jpeg_dim_t;
+
+typedef struct {
+ /* num of buf in src img */
+ uint32_t num_src_bufs;
+
+ /* num of src tmb bufs */
+ uint32_t num_tmb_bufs;
+
+ /* num of buf in dst img */
+ uint32_t num_dst_bufs;
+
+ /* should create thumbnail from main image or not */
+ uint32_t encode_thumbnail;
+
+ /* src img bufs */
+ mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+ /* this will be used only for bitstream */
+ mm_jpeg_buf_t src_thumb_buf[MM_JPEG_MAX_BUF];
+
+ /* this will be used only for bitstream */
+ mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+ /* main image color format */
+ mm_jpeg_color_format color_format;
+
+ /* thumbnail color format */
+ mm_jpeg_color_format thumb_color_format;
+
+ /* jpeg quality: range 0~100 */
+ uint32_t quality;
+
+ /* jpeg thumbnail quality: range 0~100 */
+ uint32_t thumb_quality;
+
+ /* buf to exif entries; the caller is responsible for
+ * managing the memory referenced by the internal pointers */
+ QOMX_EXIF_INFO exif_info;
+
+ /*Callback registered to be called after encode*/
+ jpeg_encode_callback_t jpeg_cb;
+
+ /*Appdata passed by the user*/
+ void* userdata;
+
+ /* thumbnail dimension */
+ mm_jpeg_dim_t thumb_dim;
+
+ /* rotation information */
+ uint32_t rotation;
+
+ /* thumb rotation information */
+ uint32_t thumb_rotation;
+
+ /* main image dimension */
+ mm_jpeg_dim_t main_dim;
+
+ /* enable encoder burst mode */
+ uint32_t burst_mode;
+
+ /* get memory function ptr */
+ int (*get_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+
+ /* release memory function ptr */
+ int (*put_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+
+ /* Flag to indicate whether to generate thumbnail from postview */
+ bool thumb_from_postview;
+} mm_jpeg_encode_params_t;
+
+typedef struct {
+ /* num of buf in src img */
+ uint32_t num_src_bufs;
+
+ /* num of buf in dst img */
+ uint32_t num_dst_bufs;
+
+ /* src img bufs */
+ mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+ /* this will be used only for bitstream */
+ mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+ /* color format */
+ mm_jpeg_color_format color_format;
+
+ jpeg_encode_callback_t jpeg_cb;
+
+ void* userdata;
+
+} mm_jpeg_decode_params_t;
+
+typedef struct {
+ /* active indices of the buffers for encoding */
+ int32_t src_index;
+ int32_t dst_index;
+ uint32_t thumb_index;
+ mm_jpeg_dim_t thumb_dim;
+
+ /* rotation information */
+ uint32_t rotation;
+
+ /* main image dimension */
+ mm_jpeg_dim_t main_dim;
+
+ /*session id*/
+ uint32_t session_id;
+
+ /* jpeg output buffer ref count */
+ int32_t ref_count;
+
+ /* allocated jpeg output buffer */
+ void *alloc_out_buffer;
+
+ /*Metadata stream*/
+ metadata_buffer_t *p_metadata;
+
+ /*HAL version*/
+ cam_hal_version_t hal_version;
+
+ /* buf to exif entries; the caller is responsible for
+ * managing the memory referenced by the internal pointers */
+ QOMX_EXIF_INFO exif_info;
+
+ /* 3a parameters */
+ mm_jpeg_exif_params_t cam_exif_params;
+
+ /* jpeg encoder QTable */
+ uint8_t qtable_set[QTABLE_MAX];
+
+ OMX_IMAGE_PARAM_QUANTIZATIONTABLETYPE qtable[QTABLE_MAX];
+
+ /* flag to enable/disable mobicat */
+ uint8_t mobicat_mask;
+
+ /*Info associated with multiple image sequence*/
+ mm_jpeg_multi_image_t multi_image_info;
+
+ /* work buf */
+ mm_jpeg_buf_t work_buf;
+} mm_jpeg_encode_job_t;
+
+typedef struct {
+ /* active indices of the buffers for encoding */
+ int32_t src_index;
+ int32_t dst_index;
+ uint32_t tmb_dst_index;
+
+ /* rotation information */
+ uint32_t rotation;
+
+ /* main image */
+ mm_jpeg_dim_t main_dim;
+
+ /*session id*/
+ uint32_t session_id;
+} mm_jpeg_decode_job_t;
+
+typedef enum {
+ JPEG_JOB_TYPE_ENCODE,
+ JPEG_JOB_TYPE_DECODE,
+ JPEG_JOB_TYPE_MAX
+} mm_jpeg_job_type_t;
+
+typedef struct {
+ mm_jpeg_job_type_t job_type;
+ union {
+ mm_jpeg_encode_job_t encode_job;
+ mm_jpeg_decode_job_t decode_job;
+ };
+} mm_jpeg_job_t;
+
+typedef struct {
+ uint32_t w;
+ uint32_t h;
+} mm_dimension;
+
+typedef struct {
+ /*Primary image in the MPO sequence*/
+ mm_jpeg_output_t primary_image;
+
+ /*All auxiliary images in the sequence*/
+ mm_jpeg_output_t aux_images[MM_JPEG_MAX_MPO_IMAGES - 1];
+
+ /*Total number of images in the MPO sequence*/
+ int num_of_images;
+
+ /*Output MPO buffer*/
+ mm_jpeg_output_t output_buff;
+
+ /*Size of the allocated output buffer*/
+ size_t output_buff_size;
+} mm_jpeg_mpo_info_t;
+
+typedef struct {
+ /* config a job -- async call */
+ int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+ /* abort a job -- sync call */
+ int (*abort_job)(uint32_t job_id);
+
+ /* create a session */
+ int (*create_session)(uint32_t client_hdl,
+ mm_jpeg_encode_params_t *p_params, uint32_t *p_session_id);
+
+ /* destroy session */
+ int (*destroy_session)(uint32_t session_id);
+
+ /* close a jpeg client -- sync call */
+ int (*close) (uint32_t clientHdl);
+
+} mm_jpeg_ops_t;
+
+typedef struct {
+ /* config a job -- async call */
+ int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+ /* abort a job -- sync call */
+ int (*abort_job)(uint32_t job_id);
+
+ /* create a session */
+ int (*create_session)(uint32_t client_hdl,
+ mm_jpeg_decode_params_t *p_params, uint32_t *p_session_id);
+
+ /* destroy session */
+ int (*destroy_session)(uint32_t session_id);
+
+ /* close a jpeg client -- sync call */
+ int (*close) (uint32_t clientHdl);
+} mm_jpegdec_ops_t;
+
+typedef struct {
+
+ /* Get Mpo size*/
+ int (*get_mpo_size)(mm_jpeg_output_t jpeg_buffer[MM_JPEG_MAX_MPO_IMAGES],
+ int num_of_images);
+
+ /* Compose MPO*/
+ int (*compose_mpo)(mm_jpeg_mpo_info_t *mpo_info);
+
+} mm_jpeg_mpo_ops_t;
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * open failed if client_handle is 0.
+ * jpeg ops tbl and mpo ops tbl will be filled in if open succeeds
+ * and jpeg meta data will be cached */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
+ mm_dimension picture_size,
+ cam_jpeg_metadata_t *jpeg_metadata);
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * open failed if client_handle is 0.
+ * jpeg ops tbl will be filled in if open succeeds */
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops);
+
+#endif /* MM_JPEG_INTERFACE_H_ */
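A rough sketch of the encode flow implied by the tables above, not part of the patch: buffer and dimension setup is elided, the callback and helper names are made up, the "wait" step is only a comment, and 0-on-success is assumed for the ops as in the rest of the interface.

#include <string.h>
#include "mm_jpeg_interface.h"

/* hypothetical completion callback: just records the job status */
static void example_jpeg_done(jpeg_job_status_t status, uint32_t client_hdl,
        uint32_t jobId, mm_jpeg_output_t *p_output, void *userData)
{
    (void)client_hdl; (void)jobId; (void)p_output;
    *(jpeg_job_status_t *)userData = status;
}

static int example_encode(mm_dimension pic_size, cam_jpeg_metadata_t *meta)
{
    mm_jpeg_ops_t ops;
    mm_jpeg_mpo_ops_t mpo_ops;
    mm_jpeg_encode_params_t params;
    mm_jpeg_job_t job;
    jpeg_job_status_t status = JPEG_JOB_STATUS_ERROR;
    uint32_t client_hdl, session_id = 0, job_id = 0;

    memset(&ops, 0, sizeof(ops));
    memset(&mpo_ops, 0, sizeof(mpo_ops));
    memset(&params, 0, sizeof(params));
    memset(&job, 0, sizeof(job));

    client_hdl = jpeg_open(&ops, &mpo_ops, pic_size, meta);
    if (client_hdl == 0)
        return -1;  /* open failed */

    /* src/dst buffers, color formats and dimensions would be filled in here */
    params.quality = 85;
    params.jpeg_cb = example_jpeg_done;
    params.userdata = &status;

    if (ops.create_session(client_hdl, &params, &session_id) != 0)
        goto out;

    job.job_type = JPEG_JOB_TYPE_ENCODE;
    job.encode_job.session_id = session_id;
    if (ops.start_job(&job, &job_id) != 0) {  /* async: result arrives via jpeg_cb */
        ops.destroy_session(session_id);
        goto out;
    }

    /* ... wait for example_jpeg_done() before tearing down ... */
    ops.destroy_session(session_id);
out:
    ops.close(client_hdl);
    return (status == JPEG_JOB_STATUS_DONE) ? 0 : -1;
}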
diff --git a/camera/QCamera2/stack/mm-camera-interface/Android.mk b/camera/QCamera2/stack/mm-camera-interface/Android.mk
new file mode 100644
index 0000000..a229734
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/Android.mk
@@ -0,0 +1,63 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+MM_CAM_FILES := \
+ src/mm_camera_interface.c \
+ src/mm_camera.c \
+ src/mm_camera_channel.c \
+ src/mm_camera_stream.c \
+ src/mm_camera_thread.c \
+ src/mm_camera_sock.c
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+ LOCAL_CFLAGS += -DUSE_ION
+endif
+
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt, $(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DVENUS_PRESENT
+endif
+
+ifneq (,$(filter msm8996 msmcobalt,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DUBWC_PRESENT
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += ../common/cam_intf.h
+LOCAL_COPY_HEADERS += ../common/cam_types.h
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/inc \
+ $(LOCAL_PATH)/../common \
+ hardware/libhardware/include/hardware \
+ hardware/qcom/media/mm-core/inc \
+ system/media/camera/include \
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+ifneq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 17 ))" )))
+ LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/socket.h
+ LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/un.h
+endif
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE := libmmcamera_interface
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
new file mode 100644
index 0000000..971ae74
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -0,0 +1,767 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+// System dependencies
+#include <poll.h>
+
+// Camera dependencies
+#include "camera_common.h"
+#include "cam_semaphore.h"
+#include "mm_camera_interface.h"
+
+/**********************************************************************************
+* Data structure declarations
+***********************************************************************************/
+/* num of callbacks allowed for an event type */
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+/* num of data callbacks allowed in a stream obj */
+#define MM_CAMERA_STREAM_BUF_CB_MAX 4
+/* num of data poll threads allowed in a channel obj */
+#define MM_CAMERA_CHANNEL_POLL_THREAD_MAX 1
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 20
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+#define THREAD_NAME_SIZE 15
+
+/* Future frame idx, large enough to make sure capture
+* settings can be applied and small enough to still capture an image */
+#define MM_CAMERA_MAX_FUTURE_FRAME_WAIT 100
+#define WAIT_TIMEOUT 5
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))
+
+struct mm_channel;
+struct mm_stream;
+struct mm_camera_obj;
+
+typedef int64_t nsecs_t;
+
+typedef enum
+{
+ MM_CAMERA_CMD_TYPE_DATA_CB, /* dataCB CMD */
+ MM_CAMERA_CMD_TYPE_EVT_CB, /* evtCB CMD */
+ MM_CAMERA_CMD_TYPE_EXIT, /* EXIT */
+ MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
+ MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB, /* superbuf dataCB CMD */
+ MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY, /* configure notify mode */
+ MM_CAMERA_CMD_TYPE_START_ZSL, /* start zsl snapshot for channel */
+ MM_CAMERA_CMD_TYPE_STOP_ZSL, /* stop zsl snapshot for channel */
+ MM_CAMERA_CMD_TYPE_FLUSH_QUEUE, /* flush queue */
+ MM_CAMERA_CMD_TYPE_GENERAL, /* general cmd */
+ MM_CAMERA_CMD_TYPE_MAX
+} mm_camera_cmdcb_type_t;
+
+typedef struct {
+ uint32_t stream_id;
+ uint32_t frame_idx;
+ uint32_t flags;
+ mm_camera_buf_def_t *buf; /* ref to buf */
+} mm_camera_buf_info_t;
+
+typedef enum {
+ MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING,
+ MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING,
+ MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING,
+ MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X,
+ MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING,
+} mm_camera_generic_cmd_type_t;
+
+typedef struct {
+ mm_camera_generic_cmd_type_t type;
+ uint32_t payload[32];
+ union {
+ cam_capture_frame_config_t frame_config;
+ };
+} mm_camera_generic_cmd_t;
+
+typedef struct {
+ uint32_t frame_idx;
+ cam_stream_type_t stream_type;
+} mm_camera_flush_cmd_t;
+
+typedef struct {
+ mm_camera_cmdcb_type_t cmd_type;
+ union {
+ mm_camera_buf_info_t buf; /* frame buf if dataCB */
+ mm_camera_event_t evt; /* evt if evtCB */
+ mm_camera_super_buf_t superbuf; /* superbuf if superbuf dataCB*/
+ mm_camera_req_buf_t req_buf; /* num of buf requested */
+ mm_camera_flush_cmd_t flush_cmd; /* frame idx boundary for flush superbuf queue*/
+ mm_camera_super_buf_notify_mode_t notify_mode; /* notification mode */
+ mm_camera_generic_cmd_t gen_cmd;
+ } u;
+} mm_camera_cmdcb_t;
+
+typedef void (*mm_camera_cmd_cb_t)(mm_camera_cmdcb_t * cmd_cb, void* user_data);
+
+typedef struct {
+ uint8_t is_active; /*indicates whether thread is active or not */
+ cam_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
+ pthread_t cmd_pid; /* cmd thread ID */
+ cam_semaphore_t cmd_sem; /* semaphore for cmd thread */
+ cam_semaphore_t sync_sem; /* semaphore for synchronization with cmd thread */
+ mm_camera_cmd_cb_t cb; /* cb for cmd */
+ void* user_data; /* user_data for cb */
+ char threadName[THREAD_NAME_SIZE];
+} mm_camera_cmd_thread_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TYPE_EVT,
+ MM_CAMERA_POLL_TYPE_DATA,
+ MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+/* function ptr defined for poll notify CB,
+ * registered at poll thread with poll fd */
+typedef void (*mm_camera_poll_notify_t)(void *user_data);
+
+typedef struct {
+ int32_t fd;
+ mm_camera_poll_notify_t notify_cb;
+ uint32_t handler;
+ void* user_data;
+} mm_camera_poll_entry_t;
+
+typedef struct {
+ mm_camera_poll_thread_type_t poll_type;
+ /* array to store poll fd and cb info
+ * for MM_CAMERA_POLL_TYPE_EVT, only index 0 is valid;
+ * for MM_CAMERA_POLL_TYPE_DATA, depends on valid stream fd */
+ mm_camera_poll_entry_t poll_entries[MAX_STREAM_NUM_IN_BUNDLE];
+ int32_t pfds[2];
+ pthread_t pid;
+ int32_t state;
+ int timeoutms;
+ uint32_t cmd;
+ struct pollfd poll_fds[MAX_STREAM_NUM_IN_BUNDLE + 1];
+ uint8_t num_fds;
+ pthread_mutex_t mutex;
+ pthread_cond_t cond_v;
+ int32_t status;
+ char threadName[THREAD_NAME_SIZE];
+ //void *my_obj;
+} mm_camera_poll_thread_t;
+
+/* mm_stream */
+typedef enum {
+ MM_STREAM_STATE_NOTUSED = 0, /* not used */
+ MM_STREAM_STATE_INITED, /* inited */
+ MM_STREAM_STATE_ACQUIRED, /* acquired, fd opened */
+ MM_STREAM_STATE_CFG, /* fmt & dim configured */
+ MM_STREAM_STATE_BUFFED, /* buf allocated */
+ MM_STREAM_STATE_REG, /* buf regged, stream off */
+ MM_STREAM_STATE_ACTIVE, /* active */
+ MM_STREAM_STATE_MAX
+} mm_stream_state_type_t;
+
+typedef enum {
+ MM_STREAM_EVT_ACQUIRE,
+ MM_STREAM_EVT_RELEASE,
+ MM_STREAM_EVT_SET_FMT,
+ MM_STREAM_EVT_GET_BUF,
+ MM_STREAM_EVT_PUT_BUF,
+ MM_STREAM_EVT_REG_BUF,
+ MM_STREAM_EVT_UNREG_BUF,
+ MM_STREAM_EVT_START,
+ MM_STREAM_EVT_STOP,
+ MM_STREAM_EVT_QBUF,
+ MM_STREAM_EVT_SET_PARM,
+ MM_STREAM_EVT_GET_PARM,
+ MM_STREAM_EVT_DO_ACTION,
+ MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+ MM_STREAM_EVT_MAX
+} mm_stream_evt_type_t;
+
+typedef struct {
+ mm_camera_buf_notify_t cb;
+ void *user_data;
+ /* cb_count = -1: infinite
+ * cb_count > 0: register only for required times */
+ int8_t cb_count;
+ mm_camera_stream_cb_type cb_type;
+} mm_stream_data_cb_t;
+
+typedef struct {
+ /* buf reference count */
+ uint8_t buf_refcnt;
+
+ /* This flag indicates whether, after allocation,
+ * the corresponding buf needs to be queued (qbuf) into the kernel
+ * (e.g. for the preview use case, the display needs to hold two bufs,
+ * so there is no need to qbuf these two bufs initially) */
+ uint8_t initial_reg_flag;
+
+ /* indicate if buf is in kernel(1) or client(0) */
+ uint8_t in_kernel;
+ /*indicate if this buffer is mapped to daemon*/
+ int8_t map_status;
+} mm_stream_buf_status_t;
+
+typedef struct mm_stream {
+ uint32_t my_hdl; /* local stream id */
+ uint32_t server_stream_id; /* stream id from server */
+ int32_t fd;
+ mm_stream_state_type_t state;
+
+ /* stream info*/
+ cam_stream_info_t *stream_info;
+
+ /* padding info */
+ cam_padding_info_t padding_info;
+
+ /* offset */
+ cam_frame_len_offset_t frame_offset;
+
+ pthread_mutex_t cmd_lock; /* lock to protect cmd_thread */
+ mm_camera_cmd_thread_t cmd_thread;
+
+ /* dataCB registered on this stream obj */
+ pthread_mutex_t cb_lock; /* cb lock to protect buf_cb */
+ mm_stream_data_cb_t buf_cb[MM_CAMERA_STREAM_BUF_CB_MAX];
+
+ /* stream buffer management */
+ pthread_mutex_t buf_lock;
+ uint8_t buf_num; /* num of buffers allocated */
+ mm_camera_buf_def_t* buf; /* ptr to buf array */
+ mm_stream_buf_status_t buf_status[CAM_MAX_NUM_BUFS_PER_STREAM]; /* ptr to buf status array */
+
+ uint8_t plane_buf_num; /* num of plane buffers allocated; used only in batch mode */
+ mm_camera_buf_def_t *plane_buf; /* pointer to plane buffer array; used only in batch mode */
+ int32_t cur_buf_idx; /* container buffer currently being filled; used only in batch mode */
+ uint8_t cur_bufs_staged; /* number of plane bufs freed by HAL for this user buf */
+
+
+ /* reference to parent channel_obj */
+ struct mm_channel* ch_obj;
+
+ uint8_t is_bundled; /* flag if stream is bundled */
+
+ /* reference to linked channel_obj */
+ struct mm_channel* linked_obj;
+ struct mm_stream * linked_stream; /* original stream */
+ uint8_t is_linked; /* flag if stream is linked */
+
+ mm_camera_stream_mem_vtbl_t mem_vtbl; /* mem ops tbl */
+
+ mm_camera_map_unmap_ops_tbl_t map_ops;
+
+ int8_t queued_buffer_count;
+
+ /* latest timestamp of a frame received on this stream and the last frame ID */
+ uint32_t prev_frameID;
+ nsecs_t prev_timestamp;
+
+ /* Need to wait for buffer mapping before stream-on*/
+ pthread_cond_t buf_cond;
+} mm_stream_t;
+
+/* mm_channel */
+typedef enum {
+ MM_CHANNEL_STATE_NOTUSED = 0, /* not used */
+ MM_CHANNEL_STATE_STOPPED, /* stopped */
+ MM_CHANNEL_STATE_ACTIVE, /* active, at least one stream active */
+ MM_CHANNEL_STATE_PAUSED, /* paused */
+ MM_CHANNEL_STATE_MAX
+} mm_channel_state_type_t;
+
+typedef enum {
+ MM_CHANNEL_EVT_ADD_STREAM,
+ MM_CHANNEL_EVT_DEL_STREAM,
+ MM_CHANNEL_EVT_LINK_STREAM,
+ MM_CHANNEL_EVT_CONFIG_STREAM,
+ MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+ MM_CHANNEL_EVT_START,
+ MM_CHANNEL_EVT_STOP,
+ MM_CHANNEL_EVT_PAUSE,
+ MM_CHANNEL_EVT_RESUME,
+ MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+ MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+ MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+ MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+ MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+ MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+ MM_CHANNEL_EVT_MAP_STREAM_BUF,
+ MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+ MM_CHANNEL_EVT_SET_STREAM_PARM,
+ MM_CHANNEL_EVT_GET_STREAM_PARM,
+ MM_CHANNEL_EVT_DO_STREAM_ACTION,
+ MM_CHANNEL_EVT_DELETE,
+ MM_CHANNEL_EVT_AF_BRACKETING,
+ MM_CHANNEL_EVT_AE_BRACKETING,
+ MM_CHANNEL_EVT_FLASH_BRACKETING,
+ MM_CHANNEL_EVT_ZOOM_1X,
+ MM_CAMERA_EVT_CAPTURE_SETTING,
+ MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+ MM_CHANNEL_EVT_MAP_STREAM_BUFS,
+ MM_CHANNEL_EVT_REG_STREAM_BUF_CB
+} mm_channel_evt_type_t;
+
+typedef struct {
+ uint32_t stream_id;
+ mm_camera_stream_config_t *config;
+} mm_evt_paylod_config_stream_t;
+
+typedef struct {
+ uint32_t stream_id;
+ cam_stream_parm_buffer_t *parms;
+} mm_evt_paylod_set_get_stream_parms_t;
+
+typedef struct {
+ uint32_t stream_id;
+ void *actions;
+} mm_evt_paylod_do_stream_action_t;
+
+typedef struct {
+ uint32_t stream_id;
+ mm_stream_data_cb_t buf_cb;
+} mm_evt_paylod_reg_stream_buf_cb;
+
+
+typedef struct {
+ uint8_t num_of_bufs;
+ mm_camera_buf_info_t super_buf[MAX_STREAM_NUM_IN_BUNDLE];
+ uint8_t matched;
+ uint8_t expected_frame;
+ uint32_t frame_idx;
+ /* unmatched meta idx needed in case of low priority queue */
+ uint32_t unmatched_meta_idx;
+} mm_channel_queue_node_t;
+
+typedef struct {
+ cam_queue_t que;
+ uint8_t num_streams;
+ /* container for bundled stream handlers */
+ uint32_t bundled_streams[MAX_STREAM_NUM_IN_BUNDLE];
+ mm_camera_channel_attr_t attr;
+ uint32_t expected_frame_id;
+ uint32_t match_cnt;
+ uint32_t expected_frame_id_without_led;
+ uint32_t led_on_start_frame_id;
+ uint32_t led_off_start_frame_id;
+ uint32_t led_on_num_frames;
+ uint32_t once;
+ uint32_t frame_skip_count;
+ uint32_t good_frame_id;
+} mm_channel_queue_t;
+
+typedef struct {
+ uint8_t is_active; /* flag to indicate if bundle is valid */
+ /* queue to store bundled super buffers */
+ mm_channel_queue_t superbuf_queue;
+ mm_camera_buf_notify_t super_buf_notify_cb;
+ void *user_data;
+} mm_channel_bundle_t;
+
+/* Nodes used for frame sync */
+typedef struct {
+ /* Frame idx */
+ uint32_t frame_idx;
+ /* Frame present for corresponding channel*/
+ uint32_t frame_valid[MAX_NUM_CAMERA_PER_BUNDLE];
+ /* Frame present in all channels*/
+ uint32_t matched;
+} mm_channel_sync_node_t;
+
+/* Frame sync information */
+typedef struct {
+ /* Number of camera channels that need to be synced*/
+ uint8_t num_cam;
+ /* position of the next node to be updated */
+ uint8_t pos;
+ /* circular node array used to store frame information */
+ mm_channel_sync_node_t node[MM_CAMERA_FRAME_SYNC_NODES];
+ /* Channel corresponding to each camera */
+ struct mm_channel *ch_obj[MAX_NUM_CAMERA_PER_BUNDLE];
+ /* Cb corresponding to each camera */
+ mm_camera_buf_notify_t cb[MAX_NUM_CAMERA_PER_BUNDLE];
+} mm_channel_frame_sync_info_t;
+
+/* Node information for multiple superbuf callbacks.
+* This can be used to batch nodes before sending them to the upper layer */
+typedef struct {
+ /* Number of nodes to be sent*/
+ uint8_t num_nodes;
+ /* queue node information*/
+ mm_channel_queue_node_t *node[MAX_NUM_CAMERA_PER_BUNDLE];
+ /* channel information*/
+ struct mm_channel *ch_obj[MAX_NUM_CAMERA_PER_BUNDLE];
+} mm_channel_node_info_t;
+
+typedef enum {
+ MM_CHANNEL_BRACKETING_STATE_OFF,
+ MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX,
+ MM_CHANNEL_BRACKETING_STATE_ACTIVE,
+} mm_channel_bracketing_state_t;
+
+typedef struct mm_channel {
+ uint32_t my_hdl;
+ mm_channel_state_type_t state;
+ pthread_mutex_t ch_lock; /* channel lock */
+
+ /* stream bundle info in the channel */
+ mm_channel_bundle_t bundle;
+
+ /* num of pending superbuffers */
+ uint32_t pending_cnt;
+ uint32_t pending_retro_cnt;
+ mm_camera_req_buf_type_t req_type;
+ uint32_t bWaitForPrepSnapshotDone;
+ uint32_t unLockAEC;
+ /* flag to indicate that ZSL snapshot should be stopped */
+ uint8_t stopZslSnapshot;
+
+ /* cmd thread for superbuffer dataCB and async stop*/
+ mm_camera_cmd_thread_t cmd_thread;
+
+ /* cb thread for sending data cb */
+ mm_camera_cmd_thread_t cb_thread;
+
+ /* data poll thread
+ * currently one data poll thread per channel
+ * could be extended to support one data poll thread per stream in the channel */
+ mm_camera_poll_thread_t poll_thread[MM_CAMERA_CHANNEL_POLL_THREAD_MAX];
+
+ /* container for all streams in channel */
+ mm_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+
+ /* reference to parent cam_obj */
+ struct mm_camera_obj* cam_obj;
+
+ /* manual zsl snapshot control */
+ uint8_t manualZSLSnapshot;
+
+ /* control for zsl led */
+ uint8_t startZSlSnapshotCalled;
+ uint8_t needLEDFlash;
+ mm_channel_bracketing_state_t bracketingState;
+ uint8_t isFlashBracketingEnabled;
+ uint8_t isZoom1xFrameRequested;
+ uint32_t burstSnapNum;
+ char threadName[THREAD_NAME_SIZE];
+
+ /*Buffer diverted*/
+ uint8_t diverted_frame_id;
+ uint32_t sessionid;
+
+ /*Frame capture configuration*/
+ uint8_t isConfigCapture;
+ uint8_t cur_capture_idx;
+ uint32_t capture_frame_id[MAX_CAPTURE_BATCH_NUM];
+ cam_capture_frame_config_t frameConfig;
+ uint8_t needLowLightZSL;
+} mm_channel_t;
+
+typedef struct {
+ mm_channel_t *ch;
+ uint32_t stream_id;
+} mm_camera_stream_link_t;
+
+/* struct to store information about pp cookie*/
+typedef struct {
+ uint32_t cam_hdl;
+ uint32_t ch_hdl;
+ uint32_t stream_hdl;
+ mm_channel_queue_node_t* super_buf;
+} mm_channel_pp_info_t;
+
+/* mm_camera */
+typedef struct {
+ mm_camera_event_notify_t evt_cb;
+ void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+ mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+ /* reg_count <=0: infinite
+ * reg_count > 0: register only for required times */
+ int reg_count;
+} mm_camera_evt_obj_t;
+
+typedef struct mm_camera_obj {
+ uint32_t my_hdl;
+ int ref_count;
+ int32_t ctrl_fd;
+ int32_t ds_fd; /* domain socket fd */
+ pthread_mutex_t cam_lock;
+ pthread_mutex_t cb_lock; /* lock for evt cb */
+ mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
+ mm_camera_evt_obj_t evt;
+ mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
+ mm_camera_cmd_thread_t evt_thread; /* thread for evt CB */
+ mm_camera_vtbl_t vtbl;
+
+ pthread_mutex_t evt_lock;
+ pthread_cond_t evt_cond;
+ mm_camera_event_t evt_rcvd;
+
+ pthread_mutex_t msg_lock; /* lock for sending msg through socket */
+ uint32_t sessionid; /* Camera server session id */
+} mm_camera_obj_t;
+
+typedef struct {
+ int8_t num_cam;
+ char video_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+ mm_camera_obj_t *cam_obj[MM_CAMERA_MAX_NUM_SENSORS];
+ struct camera_info info[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_type_t cam_type[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_mode_t cam_mode[MM_CAMERA_MAX_NUM_SENSORS];
+ uint8_t is_yuv[MM_CAMERA_MAX_NUM_SENSORS]; // 1=CAM_SENSOR_YUV, 0=CAM_SENSOR_RAW
+} mm_camera_ctrl_t;
+
+typedef enum {
+ mm_camera_async_call,
+ mm_camera_sync_call
+} mm_camera_call_type_t;
+
+/**********************************************************************************
+* external function declare
+***********************************************************************************/
+/* utility functions */
+/* set int32_t value */
+extern int32_t mm_camera_util_s_ctrl(int32_t fd,
+ uint32_t id,
+ int32_t *value);
+
+/* get int32_t value */
+extern int32_t mm_camera_util_g_ctrl(int32_t fd,
+ uint32_t id,
+ int32_t *value);
+
+/* send msg through domain socket for fd mapping */
+extern int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfd);
+
+/* send msg through domain socket for bundled fd mapping */
+extern int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int numfds);
+
+/* Check if hardware target is A family */
+uint8_t mm_camera_util_chip_is_a_family(void);
+
+/* mm-camera */
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data);
+extern int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf);
+extern int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id);
+extern int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms);
+extern int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms);
+extern int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type,
+ int fd,
+ size_t size);
+extern int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
+ const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type);
+extern int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+ int32_t do_af_flag);
+extern int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_flush(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+extern int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info);
+extern uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id);
+extern uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id);
+
+extern int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata);
+
+extern int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+extern int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_req_buf_t *buf);
+extern int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t frame_idx);
+extern int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode);
+extern int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data);
+extern int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size);
+extern int32_t mm_camera_map_stream_bufs(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx);
+extern int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ void *actions);
+extern int32_t mm_camera_get_session_id(mm_camera_obj_t *my_obj,
+ uint32_t* sessionid);
+extern int32_t mm_camera_sync_related_sensors(mm_camera_obj_t *my_obj,
+ cam_sync_related_sensors_event_info_t *parms);
+
+/* mm_channel */
+extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+extern int32_t mm_channel_init(mm_channel_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+/* qbuf is a special case that does not go through the state machine.
+ * This avoids a deadlock when trying to acquire ch_lock
+ * from the context of a dataCB while an async stop is holding ch_lock */
+extern int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf);
+/* mm_stream */
+extern int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+/* Function to register special callback for stream buffer*/
+extern int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+ mm_stream_data_cb_t val);
+extern int32_t mm_stream_map_buf(mm_stream_t *my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size);
+extern int32_t mm_stream_map_bufs(mm_stream_t *my_obj,
+ const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_stream_unmap_buf(mm_stream_t *my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx);
+
+
+/* utility functions declared in mm_camera_interface.c
+ * that are needed by mm-camera and below */
+uint32_t mm_camera_util_generate_handler(uint8_t index);
+const char * mm_camera_util_get_dev_name(uint32_t cam_handler);
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler);
+
+/* poll/cmd thread functions */
+extern int32_t mm_camera_poll_thread_launch(
+ mm_camera_poll_thread_t * poll_cb,
+ mm_camera_poll_thread_type_t poll_type);
+extern int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb);
+extern int32_t mm_camera_poll_thread_add_poll_fd(
+ mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ int32_t fd,
+ mm_camera_poll_notify_t nofity_cb,
+ void *userdata,
+ mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_del_poll_fd(
+ mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_commit_updates(
+ mm_camera_poll_thread_t * poll_cb);
+extern int32_t mm_camera_cmd_thread_launch(
+ mm_camera_cmd_thread_t * cmd_thread,
+ mm_camera_cmd_cb_t cb,
+ void* user_data);
+extern int32_t mm_camera_cmd_thread_name(const char* name);
+extern int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread);
+
+extern int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ uint32_t trigger, void *in_value);
+#endif /* __MM_CAMERA_H__ */
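The cmd thread delivers work items to its owner through mm_camera_cmd_cb_t. Below is a hedged sketch of what such a callback might look like, switching on cmd_type and reading the matching union member; the function name is hypothetical and the handler bodies are placeholders.

#include "mm_camera.h"

/* hypothetical per-channel callback passed to mm_camera_cmd_thread_launch() */
static void example_cmd_cb(mm_camera_cmdcb_t *cmd_cb, void *user_data)
{
    mm_channel_t *ch = (mm_channel_t *)user_data;
    (void)ch;

    switch (cmd_cb->cmd_type) {
    case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
        /* a matched super buffer is ready: cmd_cb->u.superbuf */
        break;
    case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
        /* the app requested more super buffers: cmd_cb->u.req_buf */
        break;
    case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
        /* flush the super buf queue up to cmd_cb->u.flush_cmd.frame_idx */
        break;
    case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
        /* switch notification mode to cmd_cb->u.notify_mode */
        break;
    case MM_CAMERA_CMD_TYPE_EXIT:
    default:
        break;
    }
}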
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
new file mode 100644
index 0000000..8298c78
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -0,0 +1,134 @@
+/* Copyright (c) 2012, 2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+// System dependencies
+#include <utils/Log.h>
+
+#ifdef QCAMERA_REDEFINE_LOG
+
+// Camera dependencies
+#include "cam_types.h"
+
+typedef enum {
+ CAM_NO_MODULE,
+ CAM_HAL_MODULE,
+ CAM_MCI_MODULE,
+ CAM_JPEG_MODULE,
+ CAM_LAST_MODULE
+} cam_modules_t;
+
+/* values that persist.camera.global.debug can be set to */
+/* all camera modules need to map their internal debug levels to this range */
+typedef enum {
+ CAM_GLBL_DBG_NONE = 0,
+ CAM_GLBL_DBG_ERR = 1,
+ CAM_GLBL_DBG_WARN = 2,
+ CAM_GLBL_DBG_HIGH = 3,
+ CAM_GLBL_DBG_DEBUG = 4,
+ CAM_GLBL_DBG_LOW = 5,
+ CAM_GLBL_DBG_INFO = 6
+} cam_global_debug_level_t;
+
+extern int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1];
+
+#define FATAL_IF(cond, ...) LOG_ALWAYS_FATAL_IF(cond, ## __VA_ARGS__)
+
+#undef CLOGx
+#define CLOGx(module, level, fmt, args...) \
+{\
+if (g_cam_log[module][level]) { \
+ mm_camera_debug_log(module, level, __func__, __LINE__, fmt, ##args); \
+}\
+}
+
+#undef CLOGI
+#define CLOGI(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_INFO, fmt, ##args)
+#undef CLOGD
+#define CLOGD(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_DEBUG, fmt, ##args)
+#undef CLOGL
+#define CLOGL(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_LOW, fmt, ##args)
+#undef CLOGW
+#define CLOGW(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_WARN, fmt, ##args)
+#undef CLOGH
+#define CLOGH(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_HIGH, fmt, ##args)
+#undef CLOGE
+#define CLOGE(module, fmt, args...) \
+ CLOGx(module, CAM_GLBL_DBG_ERR, fmt, ##args)
+
+#ifndef CAM_MODULE
+#define CAM_MODULE CAM_MCI_MODULE
+#endif
+
+#undef LOGD
+#define LOGD(fmt, args...) CLOGD(CAM_MODULE, fmt, ##args)
+#undef LOGL
+#define LOGL(fmt, args...) CLOGL(CAM_MODULE, fmt, ##args)
+#undef LOGW
+#define LOGW(fmt, args...) CLOGW(CAM_MODULE, fmt, ##args)
+#undef LOGH
+#define LOGH(fmt, args...) CLOGH(CAM_MODULE, fmt, ##args)
+#undef LOGE
+#define LOGE(fmt, args...) CLOGE(CAM_MODULE, fmt, ##args)
+#undef LOGI
+#define LOGI(fmt, args...) CLOGI(CAM_MODULE, fmt, ##args)
+
+/* reads and updates camera logging properties */
+void mm_camera_set_dbg_log_properties(void);
+
+/* generic logger function */
+void mm_camera_debug_log(const cam_modules_t module,
+ const cam_global_debug_level_t level,
+ const char *func, const int line, const char *fmt, ...);
+
+#else
+
+#undef LOGD
+#define LOGD(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGL
+#define LOGL(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGW
+#define LOGW(fmt, args...) ALOGW(fmt, ##args)
+#undef LOGH
+#define LOGH(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGE
+#define LOGE(fmt, args...) ALOGE(fmt, ##args)
+#undef LOGI
+#define LOGI(fmt, args...) ALOGV(fmt, ##args)
+
+#endif
+
+#endif /* __MM_CAMERA_DBG_H__ */
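A small sketch of how a source file might use these macros when QCAMERA_REDEFINE_LOG is defined (as the interface makefile above does): the module is selected by defining CAM_MODULE before the include, otherwise it falls back to CAM_MCI_MODULE. The function below is purely illustrative.

/* pick the logging module before pulling in the debug header */
#define CAM_MODULE CAM_JPEG_MODULE
#include "mm_camera_dbg.h"

static int example_check_fd(int fd)
{
    if (fd < 0) {
        LOGE("invalid fd %d", fd);   /* routed through CLOGE(CAM_JPEG_MODULE, ...) */
        return -1;
    }
    LOGD("fd %d looks fine", fd);
    return 0;
}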
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
new file mode 100644
index 0000000..89d5040
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
@@ -0,0 +1,76 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+// System dependencies
+#define SOCKET_H <SYSTEM_HEADER_PREFIX/socket.h>
+#include SOCKET_H
+#define UN_H <SYSTEM_HEADER_PREFIX/un.h>
+#include UN_H
+
+// Camera dependencies
+#include "cam_types.h"
+
+typedef enum {
+ MM_CAMERA_SOCK_TYPE_UDP,
+ MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+typedef union {
+ struct sockaddr addr;
+ struct sockaddr_un addr_un;
+} mm_camera_sock_addr_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfd);
+
+int mm_camera_socket_bundle_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int num_fds);
+
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
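A hedged sketch of the socket helpers in use, assuming the sendmsg/recvmsg-style return of bytes transferred; the helper name, payload type, and acknowledgement format are made up for illustration.

#include "mm_camera_sock.h"

/* hypothetical helper: send one message (plus an fd to be mapped)
 * to the daemon of camera cam_id and wait for a small reply */
static int example_send_mapping(int cam_id, void *msg, size_t msg_size, int map_fd)
{
    int ack = 0, rcvd_fd = -1;
    int sock_fd = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
    if (sock_fd < 0)
        return -1;

    if (mm_camera_socket_sendmsg(sock_fd, msg, msg_size, map_fd) <= 0) {
        mm_camera_socket_close(sock_fd);
        return -1;
    }

    /* block for a short acknowledgement from the server */
    if (mm_camera_socket_recvmsg(sock_fd, &ack, sizeof(ack), &rcvd_fd) <= 0)
        ack = -1;

    mm_camera_socket_close(sock_fd);
    return ack;
}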
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..a559815
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,2397 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+#define SET_PARM_BIT32(parm, parm_arr) \
+ (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+ ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
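These two macros treat an array of 32-bit words as one long bitmask indexed by parameter id. A tiny illustrative sketch, not part of the patch; the array size and parameter values are made up.

/* illustrative only: mark parameters 3 and 40 as present, then test one */
static void example_parm_bits(void)
{
    uint32_t parm_arr[4] = {0};    /* room for 128 parameter bits */

    SET_PARM_BIT32(3, parm_arr);   /* sets bit 3 of word 0 */
    SET_PARM_BIT32(40, parm_arr);  /* sets bit 8 of word 1 */

    if (GET_PARM_BIT32(40, parm_arr)) {
        /* parameter 40 was flagged */
    }
}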
+/* internal function declare */
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+ uint8_t reg_flag);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+ mm_camera_event_t *event);
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_channel_by_handler
+ *
+ * DESCRIPTION: utility function to get a channel object from its handle
+ *
+ * PARAMETERS :
+ * @cam_obj: ptr to a camera object
+ * @handler: channel handle
+ *
+ * RETURN : ptr to a channel object.
+ * NULL if failed.
+ *==========================================================================*/
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+ mm_camera_obj_t * cam_obj,
+ uint32_t handler)
+{
+ int i;
+ mm_channel_t *ch_obj = NULL;
+ for(i = 0; i < MM_CAMERA_CHANNEL_MAX; i++) {
+ if (handler == cam_obj->ch[i].my_hdl) {
+ ch_obj = &cam_obj->ch[i];
+ break;
+ }
+ }
+ return ch_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_chip_is_a_family
+ *
+ * DESCRIPTION: utility function to check if the host is A family chip
+ *
+ * PARAMETERS :
+ *
+ * RETURN : TRUE if A family.
+ * FALSE otherwise.
+ *==========================================================================*/
+uint8_t mm_camera_util_chip_is_a_family(void)
+{
+#ifdef USE_A_FAMILY
+ return TRUE;
+#else
+ return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_dispatch_app_event
+ *
+ * DESCRIPTION: dispatch event to apps that register for event notify
+ *
+ * PARAMETERS :
+ * @cmd_cb: ptr to a struct storing event info
+ * @user_data: user data ptr (camera object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ int i;
+ mm_camera_event_t *event = &cmd_cb->u.evt;
+ mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
+ if (NULL != my_obj) {
+ mm_camera_cmd_thread_name(my_obj->evt_thread.threadName);
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(my_obj->evt.evt[i].evt_cb) {
+ my_obj->evt.evt[i].evt_cb(
+ my_obj->my_hdl,
+ event,
+ my_obj->evt.evt[i].user_data);
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_event_notify
+ *
+ * DESCRIPTION: callback to handle event notify from kernel. This call will
+ * dequeue event from kernel.
+ *
+ * PARAMETERS :
+ * @user_data: user data ptr (camera object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_event_notify(void* user_data)
+{
+ struct v4l2_event ev;
+ struct msm_v4l2_event_data *msm_evt = NULL;
+ int rc;
+ mm_camera_event_t evt;
+ memset(&evt, 0, sizeof(mm_camera_event_t));
+
+ mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+ if (NULL != my_obj) {
+ /* read evt */
+ memset(&ev, 0, sizeof(ev));
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+
+ if (rc >= 0 && ev.id == MSM_CAMERA_MSM_NOTIFY) {
+ msm_evt = (struct msm_v4l2_event_data *)ev.u.data;
+ switch (msm_evt->command) {
+ case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
+ evt.server_event_type = CAM_EVENT_TYPE_DAEMON_PULL_REQ;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ break;
+ case CAM_EVENT_TYPE_MAP_UNMAP_DONE:
+ pthread_mutex_lock(&my_obj->evt_lock);
+ my_obj->evt_rcvd.server_event_type = msm_evt->command;
+ my_obj->evt_rcvd.status = msm_evt->status;
+ pthread_cond_signal(&my_obj->evt_cond);
+ pthread_mutex_unlock(&my_obj->evt_lock);
+ break;
+ case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+ case CAM_EVENT_TYPE_INT_TAKE_RAW:
+ {
+ evt.server_event_type = msm_evt->command;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ case MSM_CAMERA_PRIV_SHUTDOWN:
+ {
+ LOGE("Camera Event DAEMON DIED received");
+ evt.server_event_type = CAM_EVENT_TYPE_DAEMON_DIED;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ case CAM_EVENT_TYPE_CAC_DONE:
+ {
+ evt.server_event_type = CAM_EVENT_TYPE_CAC_DONE;
+ mm_camera_enqueue_evt(my_obj, &evt);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_enqueue_evt
+ *
+ * DESCRIPTION: enqueue received event into event queue to be processed by
+ * event thread.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @event : event to be queued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+ mm_camera_event_t *event)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t *node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+ node->u.evt = *event;
+
+ /* enqueue to evt cmd thread */
+ cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+ /* wake up evt cmd thread */
+ cam_sem_post(&(my_obj->evt_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_open
+ *
+ * DESCRIPTION: open a camera
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+ char dev_name[MM_CAMERA_DEV_NAME_LEN];
+ int32_t rc = 0;
+ int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+ uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+ int cam_idx = 0;
+ const char *dev_name_value = NULL;
+ int l_errno = 0;
+
+ LOGD("begin\n");
+
+ if (NULL == my_obj) {
+ goto on_error;
+ }
+ dev_name_value = mm_camera_util_get_dev_name(my_obj->my_hdl);
+ if (NULL == dev_name_value) {
+ goto on_error;
+ }
+ snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+ dev_name_value);
+ sscanf(dev_name, "/dev/video%d", &cam_idx);
+ LOGD("dev name = %s, cam_idx = %d", dev_name, cam_idx);
+
+ do{
+ n_try--;
+ errno = 0;
+ my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ l_errno = errno;
+ LOGD("ctrl_fd = %d, errno == %d", my_obj->ctrl_fd, l_errno);
+ if((my_obj->ctrl_fd >= 0) || (errno != EIO && errno != ETIMEDOUT) || (n_try <= 0 )) {
+ LOGH("opened, break out while loop");
+ break;
+ }
+ LOGE("Failed with %s error, retrying after %d milli-seconds",
+ strerror(errno), sleep_msec);
+ usleep(sleep_msec * 1000U);
+ }while (n_try > 0);
+
+ if (my_obj->ctrl_fd < 0) {
+ LOGE("cannot open control fd of '%s' (%s)\n",
+ dev_name, strerror(l_errno));
+ if (l_errno == EBUSY)
+ rc = -EUSERS;
+ else
+ rc = -1;
+ goto on_error;
+ }
+
+ /* open domain socket*/
+ n_try = MM_CAMERA_DEV_OPEN_TRIES;
+ do {
+ n_try--;
+ my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+ l_errno = errno;
+ LOGD("ds_fd = %d, errno = %d", my_obj->ds_fd, l_errno);
+ if((my_obj->ds_fd >= 0) || (n_try <= 0 )) {
+ LOGD("opened, break out while loop");
+ break;
+ }
+ LOGD("failed with I/O error retrying after %d milli-seconds",
+ sleep_msec);
+ usleep(sleep_msec * 1000U);
+ } while (n_try > 0);
+
+ if (my_obj->ds_fd < 0) {
+ LOGE("cannot open domain socket fd of '%s'(%s)\n",
+ dev_name, strerror(l_errno));
+ rc = -1;
+ goto on_error;
+ }
+ pthread_mutex_init(&my_obj->msg_lock, NULL);
+
+ pthread_mutex_init(&my_obj->cb_lock, NULL);
+ pthread_mutex_init(&my_obj->evt_lock, NULL);
+ pthread_cond_init(&my_obj->evt_cond, NULL);
+
+ LOGD("Launch evt Thread in Cam Open");
+ snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
+ mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+ mm_camera_dispatch_app_event,
+ (void *)my_obj);
+
+ /* launch event poll thread
+ * we will add evt fd into event poll thread upon user first register for evt */
+ LOGD("Launch evt Poll Thread in Cam Open");
+ snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
+ mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+ MM_CAMERA_POLL_TYPE_EVT);
+ mm_camera_evt_sub(my_obj, TRUE);
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * so as not to block operations on the other camera in dual-camera use cases. */
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ LOGD("end (rc = %d)\n", rc);
+ return rc;
+
+on_error:
+
+ if (NULL == dev_name_value) {
+ LOGE("Invalid device name\n");
+ rc = -1;
+ }
+
+ if (NULL == my_obj) {
+ LOGE("Invalid camera object\n");
+ rc = -1;
+ } else {
+ if (my_obj->ctrl_fd >= 0) {
+ close(my_obj->ctrl_fd);
+ my_obj->ctrl_fd = -1;
+ }
+ if (my_obj->ds_fd >= 0) {
+ mm_camera_socket_close(my_obj->ds_fd);
+ my_obj->ds_fd = -1;
+ }
+ }
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * so as not to block operations on the other camera in dual-camera use cases. */
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_close
+ *
+ * DESCRIPTION: close a camera: unsubscribe from events, stop the event
+ *              threads, and release the control fd, domain socket and locks.
+ *
+ * PARAMETERS :
+ *   @my_obj : ptr to a camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+ LOGD("unsubscribe evt");
+ mm_camera_evt_sub(my_obj, FALSE);
+
+ LOGD("Close evt Poll Thread in Cam Close");
+ mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+ LOGD("Close evt cmd Thread in Cam Close");
+ mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+ if(my_obj->ctrl_fd >= 0) {
+ close(my_obj->ctrl_fd);
+ my_obj->ctrl_fd = -1;
+ }
+ if(my_obj->ds_fd >= 0) {
+ mm_camera_socket_close(my_obj->ds_fd);
+ my_obj->ds_fd = -1;
+ }
+ pthread_mutex_destroy(&my_obj->msg_lock);
+
+ pthread_mutex_destroy(&my_obj->cb_lock);
+ pthread_mutex_destroy(&my_obj->evt_lock);
+ pthread_cond_destroy(&my_obj->evt_cond);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_register_event_notify_internal
+ *
+ * DESCRIPTION: internal implementation for registering callback for event notify.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @evt_cb : callback to be registered to handle event notify
+ * @user_data: user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int i;
+ int rc = -1;
+ mm_camera_evt_obj_t *evt_array = NULL;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ evt_array = &my_obj->evt;
+ if(evt_cb) {
+ /* this is reg case */
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(evt_array->evt[i].user_data == NULL) {
+ evt_array->evt[i].evt_cb = evt_cb;
+ evt_array->evt[i].user_data = user_data;
+ evt_array->reg_count++;
+ rc = 0;
+ break;
+ }
+ }
+ } else {
+ /* this is unreg case */
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(evt_array->evt[i].user_data == user_data) {
+ evt_array->evt[i].evt_cb = NULL;
+ evt_array->evt[i].user_data = NULL;
+ evt_array->reg_count--;
+ rc = 0;
+ break;
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&my_obj->cb_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_register_event_notify
+ *
+ * DESCRIPTION: registering a callback for event notify.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a camera object
+ * @evt_cb : callback to be registered to handle event notify
+ * @user_data: user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int rc = -1;
+ rc = mm_camera_register_event_notify_internal(my_obj,
+ evt_cb,
+ user_data);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf)
+{
+ int rc = -1;
+ mm_channel_t * ch_obj = NULL;
+ ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* qbuf is assumed to complete before the channel/stream is fully stopped,
+     * because it is issued from within the data callback context.
+     * To avoid deadlock, ch_lock is deliberately not taken for qbuf. */
+ if (NULL != ch_obj) {
+ rc = mm_channel_qbuf(ch_obj, buf);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id)
+{
+ int rc = -1;
+ mm_channel_t * ch_obj = NULL;
+ uint32_t payload;
+ ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+ payload = stream_id;
+
+ if (NULL != ch_obj) {
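+        /* Lock hand-off: take ch_lock before dropping cam_lock so the channel
+         * cannot be torn down while this request is in flight. The same pattern
+         * is used by the per-channel entry points below. */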
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ * @my_obj: camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_capability cap;
+
+ /* get camera capabilities */
+ memset(&cap, 0, sizeof(cap));
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCAP, &cap);
+ if (rc != 0) {
+ LOGE("cannot get camera capabilities, rc = %d, errno %d",
+ rc, errno);
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
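+ *              Illustrative call flow (assumed typical usage): the caller maps
+ *              the parm buffer once via mm_camera_map_buf() with
+ *              CAM_MAPPING_BUF_TYPE_SETPARM_BUF, fills the mapped parm_buffer_t,
+ *              and then invokes this function to commit the settings.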
+ *==========================================================================*/
+int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to the server via
+ *              the domain socket. The parameters to be fetched from the server
+ *              are already specified by the upper layer caller. After this call,
+ *              the corresponding fields of the requested parameters are filled
+ *              in by the server with detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : if this call succeeds, an auto_focus event is always
+ *              expected to follow.
+ *==========================================================================*/
+int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_DO_AUTO_FOCUS, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_CANCEL_AUTO_FOCUS, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @do_af_flag : flag indicating if AF is needed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+ int32_t do_af_flag)
+{
+ int32_t rc = -1;
+ int32_t value = do_af_flag;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PREPARE_SNAPSHOT, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_START_ZSL_SNAPSHOT, &value);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl capture
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+    int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_STOP_ZSL_SNAPSHOT, &value);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush(mm_camera_obj_t *my_obj)
+{
+ int32_t rc = -1;
+    int32_t value = 0;
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_FLUSH, &value);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : uint32_t type of channel handle
+ * 0 -- invalid channel handle, meaning the op failed
+ * >0 -- successfully added a channel with a valid handle
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ * no matching logic will be performed in channel for the bundling.
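+ *              Illustrative example (typical usage): a ZSL-style bundle usually
+ *              registers a channel_cb so snapshot and metadata frames are
+ *              matched into one super-buf; a preview-only channel can simply
+ *              pass NULL for attr, channel_cb and userdata.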
+ *==========================================================================*/
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ mm_channel_t *ch_obj = NULL;
+ uint8_t ch_idx = 0;
+ uint32_t ch_hdl = 0;
+
+ for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
+ if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
+ ch_obj = &my_obj->ch[ch_idx];
+ break;
+ }
+ }
+
+ if (NULL != ch_obj) {
+ /* initialize channel obj */
+ memset(ch_obj, 0, sizeof(mm_channel_t));
+ ch_hdl = mm_camera_util_generate_handler(ch_idx);
+ ch_obj->my_hdl = ch_hdl;
+ ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+ ch_obj->cam_obj = my_obj;
+ pthread_mutex_init(&ch_obj->ch_lock, NULL);
+ ch_obj->sessionid = my_obj->sessionid;
+ mm_channel_init(ch_obj, attr, channel_cb, userdata);
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ return ch_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : all streams in the channel should be stopped already before
+ * this channel can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DELETE,
+ NULL,
+ NULL);
+
+ pthread_mutex_destroy(&ch_obj->ch_lock);
+ memset(ch_obj, 0, sizeof(mm_channel_t));
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : the channel handle must refer to an existing channel;
+ *              otherwise the call fails with -1.
+ *==========================================================================*/
+int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+ (void *)bundle_info,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_link_stream
+ *
+ * DESCRIPTION: link a stream into a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream that will be linked
+ * @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id)
+{
+ uint32_t s_hdl = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, linked_ch_id);
+ mm_channel_t * owner_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if ((NULL != ch_obj) && (NULL != owner_obj)) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ mm_camera_stream_link_t stream_link;
+ memset(&stream_link, 0, sizeof(mm_camera_stream_link_t));
+ stream_link.ch = owner_obj;
+ stream_link.stream_id = stream_id;
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_LINK_STREAM,
+ (void*)&stream_link,
+ (void*)&s_hdl);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ uint32_t s_hdl = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_ADD_STREAM,
+ NULL,
+ (void *)&s_hdl);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DEL_STREAM,
+ (void *)&stream_id,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_zsl_snapshot_ch
+ *
+ * DESCRIPTION: starts zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_zsl_snapshot_ch
+ *
+ * DESCRIPTION: stops zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+ mm_evt_paylod_config_stream_t payload;
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+ payload.stream_id = stream_id;
+ payload.config = config;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CONFIG_STREAM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_START,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id)
+{
+ int32_t rc = 0;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_STOP,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in a bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *   @buf : ptr to the request info (number and type of frames needed)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if ((NULL != ch_obj) && (buf != NULL)) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+ (void *)buf, NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in a bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+ NULL,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *   @ch_id : channel handle
+ *   @frame_idx : frame index associated with the flush request
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj, uint32_t ch_id,
+ uint32_t frame_idx)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+ (void *)&frame_idx,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_config_channel_notify
+ *
+ * DESCRIPTION: configures the channel notification mode
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+ (void *)&notify_mode,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_set_get_stream_parms_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = s_id;
+ payload.parms = parms;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_SET_STREAM_PARM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be get from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be get from server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_set_get_stream_parms_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = s_id;
+ payload.parms = parms;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_GET_STREAM_PARM,
+ (void *)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_do_stream_action
+ *
+ * DESCRIPTION: request server to perform a stream-based action. May be removed
+ *              later if the functionality is folded into mm_camera_set_parms
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *   @stream_id : stream handle
+ * @actions : ptr to an action struct buf to be performed by server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Actions to be performed by server are already
+ * filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ void *actions)
+{
+ int32_t rc = -1;
+ mm_evt_paylod_do_stream_action_t payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.actions = actions;
+
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_DO_STREAM_ACTION,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *   @stream_id : stream handle
+ *   @buf_type : type of buffer to be mapped. It can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ * plane_idx = -1; otherwise, plean_idx is the
+ *                plane_idx = -1; otherwise, plane_idx is the
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ cam_buf_map_type payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.type = buf_type;
+ payload.frame_idx = buf_idx;
+ payload.plane_idx = plane_idx;
+ payload.fd = fd;
+ payload.size = size;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_MAP_STREAM_BUF,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_bufs(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ cam_buf_map_type_list payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memcpy(&payload, buf_map_list, sizeof(payload));
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_MAP_STREAM_BUFS,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ *   @stream_id : stream handle
+ *   @buf_type : type of buffer to be unmapped. It can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ * index to plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx)
+{
+ int32_t rc = -1;
+ cam_buf_unmap_type payload;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&payload, 0, sizeof(payload));
+ payload.stream_id = stream_id;
+ payload.type = buf_type;
+ payload.frame_idx = buf_idx;
+ payload.plane_idx = plane_idx;
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+ (void*)&payload,
+ NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_evt_sub
+ *
+ * DESCRIPTION: subscribe/unsubscribe event notify from kernel
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @reg_flag : 1 -- subscribe ; 0 -- unsubscribe
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+ uint8_t reg_flag)
+{
+ int32_t rc = 0;
+ struct v4l2_event_subscription sub;
+
+ memset(&sub, 0, sizeof(sub));
+ sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
+ sub.id = MSM_CAMERA_MSM_NOTIFY;
+ if(FALSE == reg_flag) {
+ /* unsubscribe */
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+ if (rc < 0) {
+ LOGE("unsubscribe event rc = %d, errno %d",
+ rc, errno);
+ return rc;
+ }
+        /* remove evt fd from the polling thread when unregistering the last event */
+ rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
+ my_obj->my_hdl,
+ mm_camera_sync_call);
+ } else {
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+ if (rc < 0) {
+ LOGE("subscribe event rc = %d, errno %d",
+ rc, errno);
+ return rc;
+ }
+        /* add evt fd to the polling thread when subscribing to the first event */
+ rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+ my_obj->my_hdl,
+ my_obj->ctrl_fd,
+ mm_camera_event_notify,
+ (void*)my_obj,
+ mm_camera_sync_call);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_wait_for_event
+ *
+ * DESCRIPTION: utility function to wait for certain events
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @evt_mask : mask for events to be waited. Any of event in the mask would
+ * trigger the wait to end
+ * @status : status of the event
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_camera_util_wait_for_event(mm_camera_obj_t *my_obj,
+ uint32_t evt_mask,
+ uint32_t *status)
+{
+ int32_t rc = 0;
+ struct timespec ts;
+
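+    /* Block (in WAIT_TIMEOUT-second timed waits) until the event dispatch path
+     * signals evt_cond with an event matching evt_mask stored in evt_rcvd. */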
+ pthread_mutex_lock(&my_obj->evt_lock);
+ while (!(my_obj->evt_rcvd.server_event_type & evt_mask)) {
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += WAIT_TIMEOUT;
+ rc = pthread_cond_timedwait(&my_obj->evt_cond, &my_obj->evt_lock, &ts);
+ if (rc) {
+ LOGE("pthread_cond_timedwait of evt_mask 0x%x failed %d",
+ evt_mask, rc);
+ break;
+ }
+ }
+ if (!rc) {
+ *status = my_obj->evt_rcvd.status;
+ } else {
+ *status = MSM_CAMERA_STATUS_FAIL;
+ }
+    /* reset local storage for the received event, ready for the next event */
+ memset(&my_obj->evt_rcvd, 0, sizeof(mm_camera_event_t));
+ pthread_mutex_unlock(&my_obj->evt_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_bundled_sendmsg
+ *
+ * DESCRIPTION: utility function to send bundled msg via domain socket
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @msg : message to be sent
+ * @buf_size : size of the message to be sent
+ * @sendfds : array of file descriptors to be sent
+ * @numfds : number of file descriptors to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int numfds)
+{
+ int32_t rc = -1;
+ uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg-until-response is treated as one operation */
+ pthread_mutex_lock(&my_obj->msg_lock);
+ if(mm_camera_socket_bundle_sendmsg(my_obj->ds_fd, msg, buf_size, sendfds, numfds) > 0) {
+ /* wait for event that mapping/unmapping is done */
+ mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+ if (MSM_CAMERA_STATUS_SUCCESS == status) {
+ rc = 0;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->msg_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_sendmsg
+ *
+ * DESCRIPTION: utility function to send msg via domain socket
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @msg : message to be sent
+ * @buf_size : size of the message to be sent
+ * @sendfd : >0 if any file descriptor need to be passed across process
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ size_t buf_size,
+ int sendfd)
+{
+ int32_t rc = -1;
+ uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg-until-response is treated as one operation */
+ pthread_mutex_lock(&my_obj->msg_lock);
+ if(mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd) > 0) {
+ /* wait for event that mapping/unmapping is done */
+ mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+ if (MSM_CAMERA_STATUS_SUCCESS == status) {
+ rc = 0;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->msg_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *   @buf_type : type of buffer to be mapped. It can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type,
+ int fd,
+ size_t size)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+ packet.payload.buf_map.type = buf_type;
+ packet.payload.buf_map.fd = fd;
+ packet.payload.buf_map.size = size;
+ rc = mm_camera_util_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ fd);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_map_bufs
+ *
+ * DESCRIPTION: mapping camera buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
+ const cam_buf_map_type_list* buf_map_list)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+ memcpy(&packet.payload.buf_map_list, buf_map_list,
+ sizeof(packet.payload.buf_map_list));
+
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+ uint32_t numbufs = packet.payload.buf_map_list.length;
+ uint32_t i;
+ for (i = 0; i < numbufs; i++) {
+ sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+ }
+
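+    /* Pad the unused entries with -1 so that only 'numbufs' valid file
+     * descriptors are handed to the domain socket layer. */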
+ for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+ packet.payload.buf_map_list.buf_maps[i].fd = -1;
+ sendfds[i] = -1;
+ }
+
+ rc = mm_camera_util_bundled_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ sendfds,
+ numbufs);
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ *   @buf_type : type of buffer to be unmapped. It can be one of the following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+ uint8_t buf_type)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+ packet.payload.buf_unmap.type = buf_type;
+ rc = mm_camera_util_sendmsg(my_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ -1);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_s_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for s_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending ioctl
+ * @id : control id
+ * @value : value of the ioctl to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_s_ctrl(int32_t fd, uint32_t id, int32_t *value)
+{
+ int rc = 0;
+ struct v4l2_control control;
+
+ memset(&control, 0, sizeof(control));
+ control.id = id;
+ if (value != NULL) {
+ control.value = *value;
+ }
+ rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+
+ LOGD("fd=%d, S_CTRL, id=0x%x, value = %p, rc = %d\n",
+ fd, id, value, rc);
+ if (rc < 0) {
+ LOGE("ioctl failed %d, errno %d", rc, errno);
+ } else if (value != NULL) {
+ *value = control.value;
+ }
+ return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_g_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for g_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending ioctl
+ * @id : control id
+ * @value : value of the ioctl to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+ int rc = 0;
+ struct v4l2_control control;
+
+ memset(&control, 0, sizeof(control));
+ control.id = id;
+ if (value != NULL) {
+ control.value = *value;
+ }
+ rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+ LOGD("fd=%d, G_CTRL, id=0x%x, rc = %d\n", fd, id, rc);
+ if (value != NULL) {
+ *value = control.value;
+ }
+ return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_channel_advanced_capture
+ *
+ * DESCRIPTION: sets the channel advanced capture
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @type : advanced capture type.
+ * @start_flag : flag to indicate start/stop
+ *   @in_value : input configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ uint32_t trigger, void *in_value)
+{
+ LOGD("E type = %d", type);
+ int32_t rc = -1;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ switch (type) {
+ case MM_CAMERA_AF_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_AF_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_AE_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_AE_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_FLASH_BRACKETING:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_FLASH_BRACKETING,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_ZOOM_1X:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_ZOOM_1X,
+ (void *)&trigger,
+ NULL);
+ break;
+ case MM_CAMERA_FRAME_CAPTURE:
+ rc = mm_channel_fsm_fn(ch_obj,
+ MM_CAMERA_EVT_CAPTURE_SETTING,
+ (void *)in_value,
+ NULL);
+ break;
+ default:
+ break;
+ }
+
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_get_session_id
+ *
+ * DESCRIPTION: get the session identity
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @sessionid: pointer to the output session id
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get a valid session id
+ *==========================================================================*/
+int32_t mm_camera_get_session_id(mm_camera_obj_t *my_obj,
+ uint32_t* sessionid)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if(sessionid != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+ MSM_CAMERA_PRIV_G_SESSION_ID, &value);
+ LOGD("fd=%d, get_session_id, id=0x%x, value = %d, rc = %d\n",
+ my_obj->ctrl_fd, MSM_CAMERA_PRIV_G_SESSION_ID,
+ value, rc);
+ *sessionid = value;
+ my_obj->sessionid = value;
+ }
+
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_sync_related_sensors
+ *
+ * DESCRIPTION: send sync cmd
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @parms : ptr to the related cam info to be sent to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the sync struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_sync_related_sensors(mm_camera_obj_t *my_obj,
+ cam_sync_related_sensors_event_info_t* parms)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (parms != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ CAM_PRIV_SYNC_RELATED_SENSORS, &value);
+ }
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ * @my_obj : camera object
+ * @ch_id : channel handle
+ * @stream_id : stream that will be linked
+ * @buf_cb : special callback needs to be registered for stream buffer
+ * @cb_type : Callback type SYNC/ASYNC
+ * @userdata : user data pointer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t stream_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata)
+{
+ int rc = 0;
+ mm_stream_data_cb_t buf_cb;
+ mm_channel_t * ch_obj =
+ mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+ if (NULL != ch_obj) {
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ pthread_mutex_unlock(&my_obj->cam_lock);
+
+ memset(&buf_cb, 0, sizeof(mm_stream_data_cb_t));
+ buf_cb.cb = stream_cb;
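+        /* a negative cb_count keeps the callback registered for every frame,
+         * rather than auto-unregistering after a fixed number of callbacks */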
+ buf_cb.cb_count = -1;
+ buf_cb.cb_type = cb_type;
+ buf_cb.user_data = userdata;
+
+ mm_evt_paylod_reg_stream_buf_cb payload;
+ memset(&payload, 0, sizeof(mm_evt_paylod_reg_stream_buf_cb));
+ payload.buf_cb = buf_cb;
+ payload.stream_id = stream_id;
+ mm_channel_fsm_fn(ch_obj,
+ MM_CHANNEL_EVT_REG_STREAM_BUF_CB,
+ (void*)&payload, NULL);
+ } else {
+ pthread_mutex_unlock(&my_obj->cam_lock);
+ }
+ return rc;
+}
+
+#ifdef QCAMERA_REDEFINE_LOG
+
+/*===========================================================================
+ * DESCRIPTION: mm camera debug interface
+ *
+ *==========================================================================*/
+pthread_mutex_t dbg_log_mutex;
+
+#undef LOG_TAG
+#define LOG_TAG "QCamera"
+#define CDBG_MAX_STR_LEN 1024
+#define CDBG_MAX_LINE_LENGTH 256
+
+/* current trace logging permissions
+ * {NONE, ERR, WARN, HIGH, DEBUG, LOW, INFO} */
+int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1] = {
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_NO_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_HAL_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_MCI_MODULE */
+ {0, 1, 0, 0, 0, 0, 1}, /* CAM_JPEG_MODULE */
+};
+
+/* string representation for logging level */
+static const char *cam_dbg_level_to_str[] = {
+ "", /* CAM_GLBL_DBG_NONE */
+ "<ERROR>", /* CAM_GLBL_DBG_ERR */
+ "<WARN>", /* CAM_GLBL_DBG_WARN */
+ "<HIGH>", /* CAM_GLBL_DBG_HIGH */
+ "<DBG>", /* CAM_GLBL_DBG_DEBUG */
+ "<LOW>", /* CAM_GLBL_DBG_LOW */
+ "<INFO>" /* CAM_GLBL_DBG_INFO */
+};
+
+/* current trace logging configuration */
+typedef struct {
+ cam_global_debug_level_t level;
+ int initialized;
+ const char *name;
+ const char *prop;
+} module_debug_t;
+
+static module_debug_t cam_loginfo[(int)CAM_LAST_MODULE] = {
+ {CAM_GLBL_DBG_ERR, 1,
+ "", "persist.camera.global.debug" }, /* CAM_NO_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<HAL>", "persist.camera.hal.debug" }, /* CAM_HAL_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<MCI>", "persist.camera.mci.debug" }, /* CAM_MCI_MODULE */
+ {CAM_GLBL_DBG_ERR, 1,
+ "<JPEG>", "persist.camera.mmstill.logs" }, /* CAM_JPEG_MODULE */
+};
+
+/** cam_get_dbg_level
+ *
+ * @module: module name
+ *   @pValue: debug logging level string read from the property
+ *
+ * Maps debug log string to value.
+ *
+ * Return: logging level
+ **/
+__unused
+static cam_global_debug_level_t cam_get_dbg_level(const char *module,
+ char *pValue) {
+
+ cam_global_debug_level_t rc = CAM_GLBL_DBG_NONE;
+
+ if (!strcmp(pValue, "none")) {
+ rc = CAM_GLBL_DBG_NONE;
+ } else if (!strcmp(pValue, "warn")) {
+ rc = CAM_GLBL_DBG_WARN;
+ } else if (!strcmp(pValue, "debug")) {
+ rc = CAM_GLBL_DBG_DEBUG;
+ } else if (!strcmp(pValue, "error")) {
+ rc = CAM_GLBL_DBG_ERR;
+ } else if (!strcmp(pValue, "low")) {
+ rc = CAM_GLBL_DBG_LOW;
+ } else if (!strcmp(pValue, "high")) {
+ rc = CAM_GLBL_DBG_HIGH;
+ } else if (!strcmp(pValue, "info")) {
+ rc = CAM_GLBL_DBG_INFO;
+ } else {
+ ALOGE("Invalid %s debug log level %s\n", module, pValue);
+ }
+
+ ALOGD("%s debug log level: %s\n", module, cam_dbg_level_to_str[rc]);
+
+ return rc;
+}
+
+/** cam_vsnprintf
+ * @pdst: destination buffer pointer
+ *  @size: size of destination buffer
+ *  @pfmt: string format
+ *  @argptr: variable length argument list
+ *
+ * Processes variable length argument list to a formatted string.
+ *
+ * Return: n/a
+ **/
+static void cam_vsnprintf(char* pdst, unsigned int size,
+ const char* pfmt, va_list argptr) {
+ int num_chars_written = 0;
+
+ pdst[0] = '\0';
+ num_chars_written = vsnprintf(pdst, size, pfmt, argptr);
+
+ if ((num_chars_written >= (int)size) && (size > 0)) {
+ /* Message length exceeds the buffer limit size */
+ num_chars_written = size - 1;
+ pdst[size - 1] = '\0';
+ }
+}
+
+/** mm_camera_debug_log
+ *  @module: origin of log message
+ * @level: logging level
+ * @func: caller function name
+ * @line: caller line number
+ * @fmt: log message formatting string
+ * @...: variable argument list
+ *
+ *  Generic logger method.
+ *
+ * Return: N/A
+ **/
+void mm_camera_debug_log(const cam_modules_t module,
+ const cam_global_debug_level_t level,
+ const char *func, const int line, const char *fmt, ...) {
+ char str_buffer[CDBG_MAX_STR_LEN];
+ va_list args;
+
+ va_start(args, fmt);
+ cam_vsnprintf(str_buffer, CDBG_MAX_STR_LEN, fmt, args);
+ va_end(args);
+
+ switch (level) {
+ case CAM_GLBL_DBG_WARN:
+ ALOGW("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_ERR:
+ ALOGE("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_INFO:
+ ALOGI("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ break;
+ case CAM_GLBL_DBG_HIGH:
+ case CAM_GLBL_DBG_DEBUG:
+ case CAM_GLBL_DBG_LOW:
+ default:
+ ALOGD("%s%s %s: %d: %s", cam_loginfo[module].name,
+ cam_dbg_level_to_str[level], func, line, str_buffer);
+ }
+}
+
+/** mm_camera_set_dbg_log_properties
+ *
+ * Set global and module log level properties.
+ *
+ * Return: N/A
+ **/
+void mm_camera_set_dbg_log_properties(void) {
+ int i;
+ unsigned int j;
+ static int boot_init = 1;
+ char property_value[PROPERTY_VALUE_MAX] = {0};
+ char default_value[PROPERTY_VALUE_MAX] = {0};
+
+ if (boot_init) {
+ boot_init = 0;
+ pthread_mutex_init(&dbg_log_mutex, 0);
+ }
+
+ /* set global and individual module logging levels */
+ pthread_mutex_lock(&dbg_log_mutex);
+ for (i = CAM_NO_MODULE; i < CAM_LAST_MODULE; i++) {
+ cam_global_debug_level_t log_level;
+ snprintf(default_value, PROPERTY_VALUE_MAX, "%d", (int)cam_loginfo[i].level);
+ property_get(cam_loginfo[i].prop, property_value, default_value);
+ log_level = (cam_global_debug_level_t)atoi(property_value);
+
+ /* fix KW warnings */
+ if (log_level > CAM_GLBL_DBG_INFO) {
+ log_level = CAM_GLBL_DBG_INFO;
+ }
+
+ cam_loginfo[i].level = log_level;
+
+        /* The logging macros will produce a log message when the logging level
+         * for a module is less than or equal to the level specified in the
+         * module's property, or less than or equal to the level specified by
+         * the global logging property. Currently we don't allow INFO logging
+         * to be turned off */
+ for (j = CAM_GLBL_DBG_ERR; j <= CAM_GLBL_DBG_LOW; j++) {
+ g_cam_log[i][j] = (cam_loginfo[CAM_NO_MODULE].level != CAM_GLBL_DBG_NONE) &&
+ (cam_loginfo[i].level != CAM_GLBL_DBG_NONE) &&
+ ((j <= cam_loginfo[i].level) ||
+ (j <= cam_loginfo[CAM_NO_MODULE].level));
+ }
+ }
+ pthread_mutex_unlock(&dbg_log_mutex);
+}
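+
+/* Illustrative sketch (not part of this HAL): the logging macros in
+ * mm_camera_dbg.h are expected to consult the g_cam_log permission table
+ * populated above before forwarding to mm_camera_debug_log(). A hypothetical
+ * macro of that shape could look like:
+ *
+ *   #define CAM_DBG_LOG(module, level, fmt, args...)                 \
+ *       do {                                                         \
+ *           if (g_cam_log[(module)][(level)]) {                      \
+ *               mm_camera_debug_log((module), (level),               \
+ *                       __func__, __LINE__, (fmt), ##args);          \
+ *           }                                                        \
+ *       } while (0)
+ *
+ * The real macro names and signatures live in mm_camera_dbg.h; this block
+ * only illustrates how the permission table gates log emission.
+ */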
+
+#endif
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..7807534
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,3639 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <fcntl.h>
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(mm_camera_obj_t * cam_obj,
+ uint32_t handler);
+/* Static frame sync info used between different camera channels */
+static mm_channel_frame_sync_info_t fs = { .num_cam = 0, .pos = 0 };
+/* Frame sync info access lock */
+static pthread_mutex_t fs_lock = PTHREAD_MUTEX_INITIALIZER;
+
+/* internal function declarations go here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id);
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+ mm_camera_stream_link_t *stream_link);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+ cam_bundle_config_t *bundle_info);
+int32_t mm_channel_start(mm_channel_t *my_obj);
+int32_t mm_channel_stop(mm_channel_t *my_obj);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+ mm_camera_req_buf_t *buf);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj,
+ uint32_t frame_idx,
+ cam_stream_type_t stream_type);
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+ mm_camera_super_buf_notify_mode_t notify_mode);
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue, cam_stream_type_t cam_type);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj,
+ uint32_t stream_id);
+
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+ mm_evt_paylod_do_stream_action_t *payload);
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+ cam_buf_map_type *payload);
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+ cam_buf_map_type_list *payload);
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+ cam_buf_unmap_type *payload);
+
+/* state machine function declarations */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+ mm_channel_queue_t * queue, mm_channel_t *ch_obj);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t *my_obj,
+ mm_channel_queue_t *queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t *my_obj,
+ mm_channel_queue_t *queue);
+
+static int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+ mm_camera_generic_cmd_t *p_gen_cmd);
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue);
+
+/* Start of Frame Sync util methods */
+void mm_frame_sync_reset();
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj);
+int32_t mm_frame_sync_remove(uint32_t frame_id);
+uint32_t mm_frame_sync_find_matched(uint8_t oldest);
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id);
+void mm_frame_sync_lock_queues();
+void mm_frame_sync_unlock_queues();
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node);
+/* End of Frame Sync Util methods */
+void mm_channel_send_super_buf(mm_channel_node_info_t *info);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+ mm_channel_queue_t * queue, uint32_t frame_idx);
+
+/*===========================================================================
+ * FUNCTION : mm_channel_util_get_stream_by_handler
+ *
+ * DESCRIPTION: utility function to get a stream object from its handle
+ *
+ * PARAMETERS :
+ *   @ch_obj: ptr to a channel object
+ * @handler: stream handle
+ *
+ * RETURN : ptr to a stream object.
+ * NULL if failed.
+ *==========================================================================*/
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+ mm_channel_t * ch_obj,
+ uint32_t handler)
+{
+ int i;
+ mm_stream_t *s_obj = NULL;
+ for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
+ (handler == ch_obj->streams[i].my_hdl)) {
+ s_obj = &ch_obj->streams[i];
+ break;
+ }
+ }
+ return s_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_dispatch_super_buf
+ *
+ * DESCRIPTION: dispatch super buffer of bundle to registered user
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr storing matched super buf information
+ * @userdata: user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_channel_dispatch_super_buf(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ mm_channel_t * my_obj = (mm_channel_t *)user_data;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ if (MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB != cmd_cb->cmd_type) {
+ LOGE("Wrong cmd_type (%d) for super buf dataCB",
+ cmd_cb->cmd_type);
+ return;
+ }
+
+ if (my_obj->bundle.super_buf_notify_cb) {
+ my_obj->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, my_obj->bundle.user_data);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_process_stream_buf
+ *
+ * DESCRIPTION: handle an incoming buffer from a stream in a bundle. In this
+ *              function, matching logic is performed on incoming stream frames.
+ *              Depending on the bundle attribute, matched frames are either
+ *              stored in the superbuf queue or sent to the upper layer through
+ *              the registered callback.
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr storing matched super buf information
+ * @userdata: user data ptr
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+ void *user_data)
+{
+ mm_camera_super_buf_notify_mode_t notify_mode;
+ mm_channel_queue_node_t *node = NULL;
+ mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+ uint32_t i = 0;
+ /* Set expected frame id to a future frame idx, large enough to wait
+ * for good_frame_idx_range, and small enough to still capture an image */
+ uint8_t needStartZSL = FALSE;
+
+ if (NULL == ch_obj) {
+ return;
+ }
+ if (MM_CAMERA_CMD_TYPE_DATA_CB == cmd_cb->cmd_type) {
+ /* comp_and_enqueue */
+ mm_channel_superbuf_comp_and_enqueue(
+ ch_obj,
+ &ch_obj->bundle.superbuf_queue,
+ &cmd_cb->u.buf);
+ } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB == cmd_cb->cmd_type) {
+ /* skip frames if needed */
+ ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
+ ch_obj->pending_retro_cnt = cmd_cb->u.req_buf.num_retro_buf_requested;
+ ch_obj->req_type = cmd_cb->u.req_buf.type;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+
+ LOGH("pending cnt (%d), retro count (%d)"
+ "req_type (%d) is_primary (%d)",
+ ch_obj->pending_cnt, ch_obj->pending_retro_cnt,
+ ch_obj->req_type, cmd_cb->u.req_buf.primary_only);
+ if (!ch_obj->pending_cnt || (ch_obj->pending_retro_cnt > ch_obj->pending_cnt)) {
+ ch_obj->pending_retro_cnt = ch_obj->pending_cnt;
+ }
+ if (ch_obj->pending_retro_cnt > 0) {
+ LOGL("Resetting need Led Flash!!!");
+ ch_obj->needLEDFlash = 0;
+ }
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->unLockAEC = 0;
+
+ mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ } else if (MM_CAMERA_CMD_TYPE_START_ZSL == cmd_cb->cmd_type) {
+ ch_obj->manualZSLSnapshot = TRUE;
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ } else if (MM_CAMERA_CMD_TYPE_STOP_ZSL == cmd_cb->cmd_type) {
+ ch_obj->manualZSLSnapshot = FALSE;
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ } else if (MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY == cmd_cb->cmd_type) {
+ ch_obj->bundle.superbuf_queue.attr.notify_mode = cmd_cb->u.notify_mode;
+ } else if (MM_CAMERA_CMD_TYPE_FLUSH_QUEUE == cmd_cb->cmd_type) {
+ ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.flush_cmd.frame_idx;
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, cmd_cb->u.flush_cmd.stream_type);
+ cam_sem_post(&(ch_obj->cmd_thread.sync_sem));
+ return;
+ } else if (MM_CAMERA_CMD_TYPE_GENERAL == cmd_cb->cmd_type) {
+ LOGH("MM_CAMERA_CMD_TYPE_GENERAL");
+ switch (cmd_cb->u.gen_cmd.type) {
+ case MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING:
+ case MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+            LOGI("MM_CAMERA_GENERIC_CMD_TYPE_AE/AF_BRACKETING %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need AE bracketing, start zsl snapshot");
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX;
+ } else {
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+            LOGI("MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need flash bracketing");
+ ch_obj->isFlashBracketingEnabled = TRUE;
+ } else {
+ ch_obj->isFlashBracketingEnabled = FALSE;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X %u",
+ start);
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+ if (start) {
+ LOGH("need zoom 1x frame");
+ ch_obj->isZoom1xFrameRequested = TRUE;
+ } else {
+ ch_obj->isZoom1xFrameRequested = FALSE;
+ }
+ }
+ break;
+ case MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING: {
+ uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+ LOGI("MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING %u num_batch = %d",
+ start, cmd_cb->u.gen_cmd.frame_config.num_batch);
+
+ if (start) {
+ memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+ for (i = 0; i < cmd_cb->u.gen_cmd.frame_config.num_batch; i++) {
+ if (cmd_cb->u.gen_cmd.frame_config.configs[i].type
+ != CAM_CAPTURE_RESET) {
+ ch_obj->frameConfig.configs[
+ ch_obj->frameConfig.num_batch] =
+ cmd_cb->u.gen_cmd.frame_config.configs[i];
+ ch_obj->frameConfig.num_batch++;
+                        LOGH("capture setting frame = %d type = %d",
+                                i, ch_obj->frameConfig.configs[
+                                ch_obj->frameConfig.num_batch - 1].type);
+ }
+ }
+ LOGD("Capture setting Batch Count %d",
+ ch_obj->frameConfig.num_batch);
+ ch_obj->isConfigCapture = TRUE;
+ } else {
+ ch_obj->isConfigCapture = FALSE;
+ memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+ }
+ ch_obj->cur_capture_idx = 0;
+ memset(ch_obj->capture_frame_id, 0, sizeof(uint8_t) * MAX_CAPTURE_BATCH_NUM);
+ break;
+ }
+ default:
+ LOGE("Error: Invalid command");
+ break;
+ }
+ }
+ notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+ /*Handle use case which does not need start ZSL even in unified case*/
+ if ((ch_obj->pending_cnt > 0)
+ && (ch_obj->isConfigCapture)
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && (ch_obj->startZSlSnapshotCalled == FALSE)) {
+ needStartZSL = TRUE;
+ for (i = ch_obj->cur_capture_idx;
+ i < ch_obj->frameConfig.num_batch;
+ i++) {
+ cam_capture_type type = ch_obj->frameConfig.configs[i].type;
+ if (((type == CAM_CAPTURE_FLASH) && (!ch_obj->needLEDFlash))
+ || ((type == CAM_CAPTURE_LOW_LIGHT) && (!ch_obj->needLowLightZSL))) {
+ /*For flash and low light capture, start ZSL is triggered only if needed*/
+ needStartZSL = FALSE;
+ break;
+ }
+ }
+ }
+
+ if ((ch_obj->isConfigCapture)
+ && (needStartZSL)) {
+ for (i = ch_obj->cur_capture_idx;
+ i < ch_obj->frameConfig.num_batch;
+ i++) {
+ ch_obj->capture_frame_id[i] =
+ ch_obj->bundle.superbuf_queue.expected_frame_id
+ + MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+ }
+
+ /* Need to Flush the queue and trigger frame config */
+ mm_channel_superbuf_flush(ch_obj,
+ &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+ LOGI("TRIGGER Start ZSL");
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = TRUE;
+ ch_obj->burstSnapNum = ch_obj->pending_cnt;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ } else if ((ch_obj->pending_cnt > 0)
+ && ((ch_obj->needLEDFlash == TRUE) ||
+ (MM_CHANNEL_BRACKETING_STATE_OFF != ch_obj->bracketingState))
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && ch_obj->startZSlSnapshotCalled == FALSE) {
+
+ LOGI("TRIGGER Start ZSL for Flash");
+ mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = TRUE;
+ ch_obj->burstSnapNum = ch_obj->pending_cnt;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ } else if (((ch_obj->pending_cnt == 0) || (ch_obj->stopZslSnapshot == 1))
+ && (ch_obj->manualZSLSnapshot == FALSE)
+ && (ch_obj->startZSlSnapshotCalled == TRUE)) {
+ LOGI("TRIGGER Stop ZSL for cancel picture");
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ // Unlock AEC
+ ch_obj->startZSlSnapshotCalled = FALSE;
+ ch_obj->needLEDFlash = FALSE;
+ ch_obj->burstSnapNum = 0;
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ ch_obj->unLockAEC = 1;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ ch_obj->isConfigCapture = FALSE;
+ }
+ /* bufdone for overflowed bufs */
+ mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ LOGD("Super Buffer received, pending_cnt=%d queue cnt = %d expected = %d",
+ ch_obj->pending_cnt, ch_obj->bundle.superbuf_queue.match_cnt,
+ ch_obj->bundle.superbuf_queue.expected_frame_id);
+
+ /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+ while (((ch_obj->pending_cnt > 0) ||
+ (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode)) &&
+ (!ch_obj->bWaitForPrepSnapshotDone)) {
+
+ /* dequeue */
+ mm_channel_node_info_t info;
+ memset(&info, 0x0, sizeof(info));
+
+ if (ch_obj->req_type == MM_CAMERA_REQ_FRAME_SYNC_BUF) {
+ // Lock the Queues
+ mm_frame_sync_lock_queues();
+ uint32_t match_frame = mm_frame_sync_find_matched(FALSE);
+ if (match_frame) {
+ uint8_t j = 0;
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue == NULL) {
+ LOGW("Channel queue is NULL");
+ break;
+ }
+ node = mm_channel_superbuf_dequeue_frame_internal(
+ ch_queue, match_frame);
+ if (node != NULL) {
+ info.ch_obj[info.num_nodes] = fs.ch_obj[j];
+ info.node[info.num_nodes] = node;
+ info.num_nodes++;
+                            LOGH("Added ch(%p) to node, num nodes %d",
+ fs.ch_obj[j], info.num_nodes);
+ }
+ }
+ }
+ mm_frame_sync_remove(match_frame);
+ LOGI("match frame %d", match_frame);
+ if (info.num_nodes != fs.num_cam) {
+ LOGI("num node %d != num cam (%d) Debug this",
+ info.num_nodes, fs.num_cam);
+ uint8_t j = 0;
+ // free super buffers from various nodes
+ for (j = 0; j < info.num_nodes; j++) {
+ if (info.node[j]) {
+ mm_channel_node_qbuf(info.ch_obj[j], info.node[j]);
+ free(info.node[j]);
+ }
+ }
+ // we should not use it as matched dual camera frames
+ info.num_nodes = 0;
+ }
+ }
+ mm_frame_sync_unlock_queues();
+ } else {
+ node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue, ch_obj);
+ if (node != NULL) {
+ if (ch_obj->isConfigCapture &&
+ ((node->frame_idx <
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx]))) {
+ uint8_t i;
+ LOGD("Not expected super buffer. frameID = %d expected = %d",
+ node->frame_idx, ch_obj->capture_frame_id[ch_obj->cur_capture_idx]);
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ free(node);
+ } else {
+ info.num_nodes = 1;
+ info.ch_obj[0] = ch_obj;
+ info.node[0] = node;
+ }
+ }
+ }
+ if (info.num_nodes > 0) {
+ /* decrease pending_cnt */
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+ ch_obj->pending_cnt--;
+ if (ch_obj->pending_retro_cnt > 0) {
+ if (ch_obj->pending_retro_cnt == 1) {
+ ch_obj->bWaitForPrepSnapshotDone = 1;
+ }
+ ch_obj->pending_retro_cnt--;
+ }
+
+ if (((ch_obj->pending_cnt == 0) ||
+ (ch_obj->stopZslSnapshot == 1)) &&
+ (ch_obj->manualZSLSnapshot == FALSE) &&
+ ch_obj->startZSlSnapshotCalled == TRUE) {
+ LOGI("TRIGGER Stop ZSL. All frame received");
+ mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+ ch_obj->startZSlSnapshotCalled = FALSE;
+ ch_obj->burstSnapNum = 0;
+ ch_obj->stopZslSnapshot = 0;
+ ch_obj->unLockAEC = 1;
+ ch_obj->needLEDFlash = FALSE;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+ ch_obj->isConfigCapture = FALSE;
+ }
+
+ if (ch_obj->isConfigCapture) {
+ if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames != 0) {
+ ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames--;
+ } else {
+ LOGW("Invalid frame config batch index %d max batch = %d",
+ ch_obj->cur_capture_idx, ch_obj->frameConfig.num_batch);
+ }
+
+ if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames == 0) {
+ //Received all frames for current batch
+ ch_obj->cur_capture_idx++;
+ ch_obj->bundle.superbuf_queue.expected_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ ch_obj->bundle.superbuf_queue.good_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ } else {
+ LOGH("Need %d frames more for batch %d",
+ ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames,
+ ch_obj->cur_capture_idx);
+ }
+ }
+ }
+ /* dispatch superbuf */
+ mm_channel_send_super_buf(&info);
+ } else {
+ /* no superbuf avail, break the loop */
+ break;
+ }
+ }
+}
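+
+/* Dispatch note (illustrative, restating the loop condition above): matched
+ * super buffers are dispatched while
+ *
+ *   ((pending_cnt > 0) ||
+ *    (notify_mode == MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS)) &&
+ *   (!bWaitForPrepSnapshotDone)
+ *
+ * i.e. burst mode dispatches only against outstanding HAL requests, while
+ * continuous mode dispatches every matched super buffer, and both hold off
+ * while a prepare-snapshot completion is pending.
+ */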
+
+/*===========================================================================
+ * FUNCTION : mm_channel_send_super_buf
+ *
+ * DESCRIPTION: Send super buffers to HAL
+ *
+ * PARAMETERS :
+ * @info : Info of super buffers to be sent in callback
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_channel_send_super_buf(mm_channel_node_info_t *info)
+{
+ if (!info || !info->num_nodes){
+ LOGE("X Error!! Info invalid");
+ return;
+ }
+ mm_channel_queue_node_t *node = NULL;
+
+ LOGH("num nodes %d to send", info->num_nodes);
+ uint32_t idx = 0;
+ mm_channel_t *ch_obj = NULL;
+ for (idx = 0; idx < info->num_nodes; idx++) {
+ node = info->node[idx];
+ ch_obj = info->ch_obj[idx];
+ if ((ch_obj) && (NULL != ch_obj->bundle.super_buf_notify_cb) && node) {
+ mm_camera_cmdcb_t* cb_node = NULL;
+ LOGD("Send superbuf to HAL, pending_cnt=%d",
+ ch_obj->pending_cnt);
+ /* send cam_sem_post to wake up cb thread to dispatch super buffer */
+ cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != cb_node) {
+ memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+ cb_node->cmd_type = MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB;
+ cb_node->u.superbuf.num_bufs = node->num_of_bufs;
+ uint8_t i = 0;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ cb_node->u.superbuf.bufs[i] = node->super_buf[i].buf;
+ }
+ cb_node->u.superbuf.camera_handle = ch_obj->cam_obj->my_hdl;
+ cb_node->u.superbuf.ch_id = ch_obj->my_hdl;
+ cb_node->u.superbuf.bReadyForPrepareSnapshot =
+ ch_obj->bWaitForPrepSnapshotDone;
+ if (ch_obj->unLockAEC == 1) {
+ cb_node->u.superbuf.bUnlockAEC = 1;
+ LOGH("Unlocking AEC");
+ ch_obj->unLockAEC = 0;
+ }
+ /* enqueue to cb thread */
+ cam_queue_enq(&(ch_obj->cb_thread.cmd_queue), cb_node);
+ /* wake up cb thread */
+ cam_sem_post(&(ch_obj->cb_thread.cmd_sem));
+ LOGH("Sent super buf for node[%d] ", idx);
+
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ /* buf done with the unused super buf */
+ uint8_t i = 0;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ }
+ free(node);
+ } else if ((ch_obj != NULL) && (node != NULL)) {
+ /* buf done with the unused super buf */
+ uint8_t i;
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ free(node);
+ } else {
+ LOGE("node is NULL, debug this");
+ }
+ }
+}
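+
+/* Illustrative sketch (not part of this file): the super_buf_notify_cb
+ * registered by the HAL receives the mm_camera_super_buf_t packed above and
+ * is expected to return every buffer it does not keep. A hypothetical
+ * callback could look like:
+ *
+ *   static void hal_super_buf_cb(mm_camera_super_buf_t *super_buf,
+ *           void *user_data)
+ *   {
+ *       uint32_t i;
+ *       for (i = 0; i < super_buf->num_bufs; i++) {
+ *           // consume super_buf->bufs[i], then queue it back through the
+ *           // camera ops qbuf() so the stream does not run out of buffers
+ *       }
+ *   }
+ *
+ * hal_super_buf_cb and its buffer handling are assumptions for illustration;
+ * the real consumers live in the QCamera2 HAL layer.
+ */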
+
+/*===========================================================================
+ * FUNCTION : mm_channel_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ * @my_obj : Channel object
+ *   @stream_id : stream for which the buffer callback is registered
+ *   @buf_cb : special callback to be registered for the stream buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_reg_stream_buf_cb (mm_channel_t* my_obj,
+ uint32_t stream_id, mm_stream_data_cb_t buf_cb)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+ rc = mm_stream_reg_buf_cb(s_obj, buf_cb);
+ }
+
+ return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn
+ *
+ * DESCRIPTION: channel finite state machine entry function. Depends on channel
+ * state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ LOGD("E state = %d", my_obj->state);
+ switch (my_obj->state) {
+ case MM_CHANNEL_STATE_NOTUSED:
+ rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_STOPPED:
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_ACTIVE:
+ rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_PAUSED:
+ rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ LOGD("Not a valid state (%d)", my_obj->state);
+ break;
+ }
+
+ /* unlock ch_lock */
+ pthread_mutex_unlock(&my_obj->ch_lock);
+ LOGD("X rc = %d", rc);
+ return rc;
+}
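+
+/* Locking note (illustrative): mm_channel_fsm_fn() releases ch_lock on exit
+ * but never acquires it, so callers are assumed to take the lock before
+ * driving the FSM, e.g. (sketch of the assumed calling pattern):
+ *
+ *   pthread_mutex_lock(&ch_obj->ch_lock);
+ *   rc = mm_channel_fsm_fn(ch_obj, evt, in_val, out_val);
+ *   // ch_lock has already been released inside the FSM at this point
+ */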
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_notused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in NOT_USED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ switch (evt) {
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_stopped
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in STOPPED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E evt = %d", evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_ADD_STREAM:
+ {
+ uint32_t s_hdl = 0;
+ s_hdl = mm_channel_add_stream(my_obj);
+ *((uint32_t*)out_val) = s_hdl;
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_LINK_STREAM:
+ {
+ mm_camera_stream_link_t *stream_link = NULL;
+ uint32_t s_hdl = 0;
+ stream_link = (mm_camera_stream_link_t *) in_val;
+ s_hdl = mm_channel_link_stream(my_obj, stream_link);
+ *((uint32_t*)out_val) = s_hdl;
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_DEL_STREAM:
+ {
+ uint32_t s_id = *((uint32_t *)in_val);
+ rc = mm_channel_del_stream(my_obj, s_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_START:
+ {
+ rc = mm_channel_start(my_obj);
+ /* first stream started in stopped state
+ * move to active state */
+ if (0 == rc) {
+ my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_CONFIG_STREAM:
+ {
+ mm_evt_paylod_config_stream_t *payload =
+ (mm_evt_paylod_config_stream_t *)in_val;
+ rc = mm_channel_config_stream(my_obj,
+ payload->stream_id,
+ payload->config);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+ {
+ cam_bundle_config_t *payload =
+ (cam_bundle_config_t *)in_val;
+ rc = mm_channel_get_bundle_info(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DELETE:
+ {
+ mm_channel_release(my_obj);
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+ {
+ uint32_t stream_id = *((uint32_t *)in_val);
+ rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+ {
+ mm_evt_paylod_do_stream_action_t *payload =
+ (mm_evt_paylod_do_stream_action_t *)in_val;
+ rc = mm_channel_do_stream_action(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+ {
+ cam_buf_map_type *payload =
+ (cam_buf_map_type *)in_val;
+ rc = mm_channel_map_stream_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+ {
+ cam_buf_map_type_list *payload =
+ (cam_buf_map_type_list *)in_val;
+ rc = mm_channel_map_stream_bufs(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+ {
+ cam_buf_unmap_type *payload =
+ (cam_buf_unmap_type *)in_val;
+ rc = mm_channel_unmap_stream_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+ {
+ mm_evt_paylod_reg_stream_buf_cb *payload =
+ (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+ rc = mm_channel_reg_stream_buf_cb (my_obj,
+ payload->stream_id, payload->buf_cb);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d)",
+ my_obj->state, evt);
+ break;
+ }
+    LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_active
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in ACTIVE state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ LOGD("E evt = %d", evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_STOP:
+ {
+ rc = mm_channel_stop(my_obj);
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ }
+ break;
+ case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+ {
+ mm_camera_req_buf_t *payload =
+ (mm_camera_req_buf_t *)in_val;
+ rc = mm_channel_request_super_buf(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+ {
+ rc = mm_channel_cancel_super_buf_request(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE:
+ {
+ uint32_t frame_idx = *((uint32_t *)in_val);
+ rc = mm_channel_flush_super_buf_queue(my_obj, frame_idx, CAM_STREAM_TYPE_DEFAULT);
+ }
+ break;
+ case MM_CHANNEL_EVT_START_ZSL_SNAPSHOT:
+ {
+ rc = mm_channel_start_zsl_snapshot(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT:
+ {
+ rc = mm_channel_stop_zsl_snapshot(my_obj);
+ }
+ break;
+ case MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE:
+ {
+ mm_camera_super_buf_notify_mode_t notify_mode =
+ *((mm_camera_super_buf_notify_mode_t *)in_val);
+ rc = mm_channel_config_notify_mode(my_obj, notify_mode);
+ }
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+ {
+ uint32_t stream_id = *((uint32_t *)in_val);
+ rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+ {
+ mm_evt_paylod_do_stream_action_t *payload =
+ (mm_evt_paylod_do_stream_action_t *)in_val;
+ rc = mm_channel_do_stream_action(my_obj, payload);
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+ {
+ cam_buf_map_type *payload =
+ (cam_buf_map_type *)in_val;
+ if (payload != NULL) {
+ uint8_t type = payload->type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_map_stream_buf(my_obj, payload);
+ }
+ } else {
+                LOGE("cannot map regular stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+ {
+ cam_buf_map_type_list *payload =
+ (cam_buf_map_type_list *)in_val;
+ if ((payload != NULL) && (payload->length > 0)) {
+ uint8_t type = payload->buf_maps[0].type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_map_stream_bufs(my_obj, payload);
+ }
+ } else {
+                LOGE("cannot map regular stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+ {
+ cam_buf_unmap_type *payload =
+ (cam_buf_unmap_type *)in_val;
+ if (payload != NULL) {
+ uint8_t type = payload->type;
+ if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+ (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+ rc = mm_channel_unmap_stream_buf(my_obj, payload);
+ }
+ } else {
+                LOGE("cannot unmap regular stream buf in active state");
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_AF_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_AF_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_AE_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_AE_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_FLASH_BRACKETING:
+ {
+ LOGH("MM_CHANNEL_EVT_FLASH_BRACKETING");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_ZOOM_1X:
+ {
+ LOGH("MM_CHANNEL_EVT_ZOOM_1X");
+ uint32_t start_flag = *((uint32_t *)in_val);
+ mm_camera_generic_cmd_t gen_cmd;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X;
+ gen_cmd.payload[0] = start_flag;
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CAMERA_EVT_CAPTURE_SETTING:
+ {
+ mm_camera_generic_cmd_t gen_cmd;
+ cam_capture_frame_config_t *input;
+ gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING;
+ LOGH("MM_CAMERA_EVT_CAPTURE_SETTING");
+ if (in_val == NULL) {
+ gen_cmd.payload[0] = 0;
+ memset(&gen_cmd.frame_config, 0, sizeof(cam_capture_frame_config_t));
+ } else {
+ gen_cmd.payload[0] = 1;
+ input = (cam_capture_frame_config_t *)in_val;
+ gen_cmd.frame_config = *input;
+ }
+ rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+ }
+ break;
+ case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+ {
+ mm_evt_paylod_reg_stream_buf_cb *payload =
+ (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+ rc = mm_channel_reg_stream_buf_cb (my_obj,
+ payload->stream_id, payload->buf_cb);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_fsm_fn_paused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ * in PAUSED state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a channel object
+ * @evt : channel event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ /* currently we are not supporting pause/resume channel */
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_init
+ *
+ * DESCRIPTION: initialize a channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object be to initialized
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ * no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+int32_t mm_channel_init(mm_channel_t *my_obj,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ int32_t rc = 0;
+
+ my_obj->bundle.super_buf_notify_cb = channel_cb;
+ my_obj->bundle.user_data = userdata;
+ if (NULL != attr) {
+ my_obj->bundle.superbuf_queue.attr = *attr;
+ }
+
+ LOGD("Launch data poll thread in channel open");
+ snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
+ mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+ MM_CAMERA_POLL_TYPE_DATA);
+
+ /* change state to stopped state */
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ return rc;
+}
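+
+/* Illustrative sketch (assumed caller-side usage): a bundled channel is
+ * initialized with an attribute block and a super-buffer callback, while a
+ * non-bundled channel may pass NULL for all three, e.g.:
+ *
+ *   mm_camera_channel_attr_t attr;
+ *   memset(&attr, 0, sizeof(attr));
+ *   attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ *   mm_channel_init(ch_obj, &attr, hal_super_buf_cb, hal_user_data);
+ *
+ * hal_super_buf_cb / hal_user_data are hypothetical HAL-side names used only
+ * for illustration.
+ */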
+
+/*===========================================================================
+ * FUNCTION : mm_channel_release
+ *
+ * DESCRIPTION: release a channel resource. Channel state will move to UNUSED
+ * state after this call.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_channel_release(mm_channel_t *my_obj)
+{
+ /* stop data poll thread */
+ mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+ /* memset bundle info */
+ memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+
+ /* change state to notused state */
+ my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_link_stream
+ *
+ * DESCRIPTION: link a stream from external channel into this channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_link : channel and stream to be linked
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+ mm_camera_stream_link_t *stream_link)
+{
+ uint8_t idx = 0;
+ uint32_t s_hdl = 0;
+ mm_stream_t *stream_obj = NULL;
+ mm_stream_t *stream = NULL;
+
+ if (NULL == stream_link) {
+ LOGE("Invalid stream link");
+ return 0;
+ }
+
+ stream = mm_channel_util_get_stream_by_handler(stream_link->ch,
+ stream_link->stream_id);
+ if (NULL == stream) {
+ return 0;
+ }
+
+ /* check available stream */
+ for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+ if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+ stream_obj = &my_obj->streams[idx];
+ break;
+ }
+ }
+ if (NULL == stream_obj) {
+ LOGE("streams reach max, no more stream allowed to add");
+ return s_hdl;
+ }
+
+ /* initialize stream object */
+ *stream_obj = *stream;
+ stream_obj->linked_stream = stream;
+ s_hdl = stream->my_hdl;
+
+ LOGD("stream handle = %d", s_hdl);
+ return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_add_stream
+ *
+ * DESCRIPTION: add a stream into the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ uint8_t idx = 0;
+ uint32_t s_hdl = 0;
+ mm_stream_t *stream_obj = NULL;
+
+ LOGD("E");
+ /* check available stream */
+ for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+ if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+ stream_obj = &my_obj->streams[idx];
+ break;
+ }
+ }
+ if (NULL == stream_obj) {
+ LOGE("streams reach max, no more stream allowed to add");
+ return s_hdl;
+ }
+
+ /* initialize stream object */
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ stream_obj->fd = -1;
+ stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+ stream_obj->ch_obj = my_obj;
+ pthread_mutex_init(&stream_obj->buf_lock, NULL);
+ pthread_mutex_init(&stream_obj->cb_lock, NULL);
+ pthread_mutex_init(&stream_obj->cmd_lock, NULL);
+ pthread_cond_init(&stream_obj->buf_cond, NULL);
+ memset(stream_obj->buf_status, 0,
+ sizeof(stream_obj->buf_status));
+ stream_obj->state = MM_STREAM_STATE_INITED;
+
+ /* acquire stream */
+ rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+ if (0 == rc) {
+ s_hdl = stream_obj->my_hdl;
+ } else {
+ /* error during acquire, de-init */
+ pthread_cond_destroy(&stream_obj->buf_cond);
+ pthread_mutex_destroy(&stream_obj->buf_lock);
+ pthread_mutex_destroy(&stream_obj->cb_lock);
+ pthread_mutex_destroy(&stream_obj->cmd_lock);
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ }
+ LOGD("stream handle = %d", s_hdl);
+ return s_hdl;
+}
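+
+/* Illustrative usage (sketch): a returned handle of 0 means no stream slot
+ * was available or the acquire failed, so callers are expected to check it:
+ *
+ *   uint32_t stream_id = mm_channel_add_stream(ch_obj);
+ *   if (stream_id == 0) {
+ *       // no stream was added; do not attempt to configure or start it
+ *   }
+ */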
+
+/*===========================================================================
+ * FUNCTION : mm_channel_del_stream
+ *
+ * DESCRIPTION: delete a stream from the channel by its handle
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : assume stream is stopped before it can be deleted
+ *==========================================================================*/
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+ return rc;
+ }
+
+ if (stream_obj->ch_obj != my_obj) {
+ /* Only unlink stream */
+ pthread_mutex_lock(&stream_obj->linked_stream->buf_lock);
+ stream_obj->linked_stream->is_linked = 0;
+ stream_obj->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&stream_obj->linked_stream->buf_lock);
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_RELEASE,
+ NULL,
+ NULL);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ LOGD("E stream ID = %d", stream_id);
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+ return rc;
+ }
+
+ if (stream_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ /* set stream fmt */
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_SET_FMT,
+ (void *)config,
+ NULL);
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel, which should include all
+ * streams within this channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+ cam_bundle_config_t *bundle_info)
+{
+ int i;
+ mm_stream_t *s_obj = NULL;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+ int32_t rc = 0;
+
+ memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+ bundle_info->bundle_id = my_obj->my_hdl;
+ bundle_info->num_of_streams = 0;
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ if ((CAM_STREAM_TYPE_METADATA != stream_type) &&
+ (s_obj->ch_obj == my_obj)) {
+ bundle_info->stream_ids[bundle_info->num_of_streams++] =
+ s_obj->server_stream_id;
+ }
+ } else {
+ LOGE("cannot find stream obj (%d) by handler (%d)",
+ i, my_obj->streams[i].my_hdl);
+ rc = -1;
+ break;
+ }
+ }
+ }
+ if (rc != 0) {
+ /* error, reset to 0 */
+ memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_start
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ int i = 0, j = 0;
+ mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+ uint8_t num_streams_to_start = 0;
+ uint8_t num_streams_in_bundle_queue = 0;
+ mm_stream_t *s_obj = NULL;
+ int meta_stream_idx = 0;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ /* remember meta data stream index */
+ if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
+ (s_obj->ch_obj == my_obj)) {
+ meta_stream_idx = num_streams_to_start;
+ }
+ s_objs[num_streams_to_start++] = s_obj;
+
+ if (!s_obj->stream_info->noFrameExpected) {
+ num_streams_in_bundle_queue++;
+ }
+ }
+ }
+ }
+
+ if (meta_stream_idx > 0 ) {
+ /* always start meta data stream first, so switch the stream object with the first one */
+ s_obj = s_objs[0];
+ s_objs[0] = s_objs[meta_stream_idx];
+ s_objs[meta_stream_idx] = s_obj;
+ }
+
+ if (NULL != my_obj->bundle.super_buf_notify_cb) {
+ /* need to send up cb, therefore launch thread */
+ /* init superbuf queue */
+ mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+ my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
+ my_obj->bundle.superbuf_queue.expected_frame_id =
+ my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
+ my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
+ my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
+ my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
+ my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
+ my_obj->bundle.superbuf_queue.good_frame_id = 0;
+
+ for (i = 0; i < num_streams_to_start; i++) {
+ /* Only bundle streams that belong to the channel */
+ if(!(s_objs[i]->stream_info->noFrameExpected)) {
+ if (s_objs[i]->ch_obj == my_obj) {
+ /* set bundled flag to streams */
+ s_objs[i]->is_bundled = 1;
+ }
+ my_obj->bundle.superbuf_queue.bundled_streams[j++] = s_objs[i]->my_hdl;
+ }
+ }
+
+ /* launch cb thread for dispatching super buf through cb */
+ snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
+ mm_camera_cmd_thread_launch(&my_obj->cb_thread,
+ mm_channel_dispatch_super_buf,
+ (void*)my_obj);
+
+ /* launch cmd thread for super buf dataCB */
+ snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_channel_process_stream_buf,
+ (void*)my_obj);
+
+ /* set flag to TRUE */
+ my_obj->bundle.is_active = TRUE;
+ }
+
+ /* link any streams first before starting the rest of the streams */
+ for (i = 0; i < num_streams_to_start; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+ s_objs[i]->linked_stream->linked_obj = my_obj;
+ s_objs[i]->linked_stream->is_linked = 1;
+ pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+ continue;
+ }
+ }
+
+ for (i = 0; i < num_streams_to_start; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+ /* all streams within a channel should be started at the same time */
+ if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
+ LOGE("stream already started idx(%d)", i);
+ rc = -1;
+ break;
+ }
+
+ /* allocate buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_GET_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("get buf failed at idx(%d)", i);
+ break;
+ }
+
+ /* reg buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_REG_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("reg buf failed at idx(%d)", i);
+ break;
+ }
+
+ /* start stream */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_START,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ LOGE("start stream failed at idx(%d)", i);
+ break;
+ }
+ }
+
+ /* error handling */
+ if (0 != rc) {
+ /* unlink the streams first */
+ for (j = 0; j < num_streams_to_start; j++) {
+ if (s_objs[j]->ch_obj != my_obj) {
+ pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
+ s_objs[j]->linked_stream->is_linked = 0;
+ s_objs[j]->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);
+
+ if (TRUE == my_obj->bundle.is_active) {
+ mm_channel_flush_super_buf_queue(my_obj, 0,
+ s_objs[i]->stream_info->stream_type);
+ }
+ memset(s_objs[j], 0, sizeof(mm_stream_t));
+ continue;
+ }
+ }
+
+ for (j = 0; j <= i; j++) {
+ if ((NULL == s_objs[j]) || (s_objs[j]->ch_obj != my_obj)) {
+ continue;
+ }
+ /* stop streams*/
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf */
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[j],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+ }
+
+ /* destroy super buf cmd thread */
+ if (TRUE == my_obj->bundle.is_active) {
+ /* first stop bundle thread */
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+ /* deinit superbuf queue */
+ mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+ /* memset super buffer queue info */
+ my_obj->bundle.is_active = 0;
+ memset(&my_obj->bundle.superbuf_queue, 0, sizeof(mm_channel_queue_t));
+ }
+ }
+ my_obj->bWaitForPrepSnapshotDone = 0;
+ if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ LOGH("registering Channel obj %p", my_obj);
+ mm_frame_sync_register_channel(my_obj);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ int i;
+ mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+ uint8_t num_streams_to_stop = 0;
+ mm_stream_t *s_obj = NULL;
+ int meta_stream_idx = 0;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ mm_frame_sync_unregister_channel(my_obj);
+ }
+
+ for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if (my_obj->streams[i].my_hdl > 0) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ my_obj->streams[i].my_hdl);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj == my_obj) {
+ stream_type = s_obj->stream_info->stream_type;
+ /* remember meta data stream index */
+ if (stream_type == CAM_STREAM_TYPE_METADATA) {
+ meta_stream_idx = num_streams_to_stop;
+ }
+ }
+ s_objs[num_streams_to_stop++] = s_obj;
+ }
+ }
+ }
+
+ if (meta_stream_idx < num_streams_to_stop - 1 ) {
+ /* always stop meta data stream last, so switch the stream object with the last one */
+ s_obj = s_objs[num_streams_to_stop - 1];
+ s_objs[num_streams_to_stop - 1] = s_objs[meta_stream_idx];
+ s_objs[meta_stream_idx] = s_obj;
+ }
+
+ for (i = 0; i < num_streams_to_stop; i++) {
+ /* stream that are linked to this channel should not be stopped */
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+
+ /* stream off */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf at kernel */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+ }
+
+ for (i = 0; i < num_streams_to_stop; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ /* Only unlink stream */
+ pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+ s_objs[i]->linked_stream->is_linked = 0;
+ s_objs[i]->linked_stream->linked_obj = NULL;
+ pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+ }
+ }
+
+ /* destroy super buf cmd thread */
+ if (TRUE == my_obj->bundle.is_active) {
+ mm_channel_flush_super_buf_queue(my_obj, 0, CAM_STREAM_TYPE_DEFAULT);
+ /* first stop bundle thread */
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+ /* deinit superbuf queue */
+ mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+ /* reset few fields in the bundle info */
+ my_obj->bundle.is_active = 0;
+ my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+ my_obj->bundle.superbuf_queue.good_frame_id = 0;
+ my_obj->bundle.superbuf_queue.match_cnt = 0;
+ }
+
+ /* since all streams are stopped, we are safe to
+ * release all buffers allocated in stream */
+ for (i = 0; i < num_streams_to_stop; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ continue;
+ }
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+ }
+
+ for (i = 0; i < num_streams_to_stop; i++) {
+ if (s_objs[i]->ch_obj != my_obj) {
+ memset(s_objs[i], 0, sizeof(mm_stream_t));
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ * frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @buf : ptr to request info (number of matched frames needed,
+ * number of retro frames needed, etc.)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+ mm_camera_req_buf_t *buf)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ if(!buf) {
+ LOGE("Request info buf is NULL");
+ return -1;
+ }
+
+ /* setting pending_cnt > 0 will trigger dispatching of super frames;
+ * send cam_sem_post to wake up the cmd thread to dispatch the super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+ node->u.req_buf = *buf;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
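+
+/* Usage sketch (illustrative only, not part of the imported HAL): an upper
+ * layer requesting two matched super buffers could fill the request struct
+ * with the same type used by mm_channel_cancel_super_buf_request below:
+ *
+ * mm_camera_req_buf_t req;
+ * memset(&req, 0, sizeof(req));
+ * req.type = MM_CAMERA_REQ_SUPER_BUF;
+ * req.num_buf_requested = 2;
+ * rc = mm_channel_request_super_buf(my_obj, &req);
+ */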
+
+/*===========================================================================
+ * FUNCTION : mm_channel_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain number
+ * of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ /* reset pending_cnt */
+ mm_camera_req_buf_t buf;
+ memset(&buf, 0x0, sizeof(buf));
+ buf.type = MM_CAMERA_REQ_SUPER_BUF;
+ buf.num_buf_requested = 0;
+ rc = mm_channel_request_super_buf(my_obj, &buf);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush superbuf queue
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @frame_idx : frame idx until which to flush all superbufs
+ * @stream_type : flush only this stream type (CAM_STREAM_TYPE_DEFAULT flushes all)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj, uint32_t frame_idx,
+ cam_stream_type_t stream_type)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+ node->u.flush_cmd.frame_idx = frame_idx;
+ node->u.flush_cmd.stream_type = stream_type;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+
+ /* wait for ack from cmd thread */
+ cam_sem_wait(&(my_obj->cmd_thread.sync_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
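+
+/* Usage sketch (illustrative only): flushing every pending superbuf regardless
+ * of stream type, exactly as mm_channel_stop does above:
+ *
+ * mm_channel_flush_super_buf_queue(my_obj, 0, CAM_STREAM_TYPE_DEFAULT);
+ *
+ * Passing a specific stream type instead limits the flush to buffers of that
+ * type (see mm_channel_superbuf_flush further below).
+ */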
+
+/*===========================================================================
+ * FUNCTION : mm_channel_config_notify_mode
+ *
+ * DESCRIPTION: configure notification mode
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->u.notify_mode = notify_mode;
+ node->cmd_type = MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_START_ZSL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl snapshot
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_STOP_ZSL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* Redirect to linked stream */
+ rc = mm_stream_fsm_fn(s_obj->linked_stream,
+ MM_STREAM_EVT_QBUF,
+ (void *)buf,
+ NULL);
+ } else {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_QBUF,
+ (void *)buf,
+ NULL);
+ }
+ }
+
+ return rc;
+}
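+
+/* Usage sketch (illustrative only): a consumed super buffer is returned to the
+ * kernel one stream buffer at a time, mirroring mm_channel_node_qbuf at the
+ * end of this file:
+ *
+ * for (i = 0; i < super_buf->num_of_bufs; i++) {
+ * if (NULL != super_buf->super_buf[i].buf) {
+ * mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ * }
+ * }
+ */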
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @stream_id : stream id
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj, uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* Redirect to linked stream */
+ rc = mm_stream_fsm_fn(s_obj->linked_stream,
+ MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+ NULL,
+ NULL);
+ } else {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+ NULL,
+ NULL);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_set_stream_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_SET_PARM,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_get_stream_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be queried from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be queried from the server are already
+ * filled in by the upper layer caller. After this call, the corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_GET_PARM,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_do_stream_action
+ *
+ * DESCRIPTION: request server to perform a stream-based action. May be removed later
+ * if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @s_id : stream handle
+ * @actions : ptr to an action struct buf to be performed by server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Actions to be performed by server are already
+ * filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+ mm_evt_paylod_do_stream_action_t *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_DO_ACTION,
+ (void *)payload,
+ NULL);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to payload for mapping
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+ cam_buf_map_type *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_map_buf(s_obj,
+ payload->type,
+ payload->frame_idx,
+ payload->plane_idx,
+ payload->fd,
+ payload->size);
+ }
+
+ return rc;
+}
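+
+/* Usage sketch (illustrative only): a caller maps one stream buffer by filling
+ * cam_buf_map_type with the fields consumed above. The fd/size values are
+ * placeholders and the buffer type assumes the stream-buffer mapping constant
+ * from cam_types.h:
+ *
+ * cam_buf_map_type map;
+ * memset(&map, 0, sizeof(map));
+ * map.stream_id = stream_id;
+ * map.type = CAM_MAPPING_BUF_TYPE_STREAM_BUF;
+ * map.frame_idx = 0;
+ * map.plane_idx = -1;
+ * map.fd = buf_fd;
+ * map.size = buf_size;
+ * rc = mm_channel_map_stream_buf(my_obj, &map);
+ */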
+
+/*===========================================================================
+ * FUNCTION : mm_channel_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to payload for mapping
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+ cam_buf_map_type_list *payload)
+{
+ int32_t rc = -1;
+ if ((payload == NULL) || (payload->length == 0)) {
+ return rc;
+ }
+
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->buf_maps[0].stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_map_bufs(s_obj, payload);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @payload : ptr to unmap payload
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+ cam_buf_unmap_type *payload)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+ payload->stream_id);
+ if (NULL != s_obj) {
+ if (s_obj->ch_obj != my_obj) {
+ /* No op. on linked streams */
+ return 0;
+ }
+
+ rc = mm_stream_unmap_buf(s_obj, payload->type,
+ payload->frame_idx, payload->plane_idx);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_queue_init
+ *
+ * DESCRIPTION: initialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ * @queue : ptr to superbuf queue to be initialized
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+ return cam_queue_init(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_queue_deinit
+ *
+ * DESCRIPTION: deinitialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ * @queue : ptr to superbuf queue to be deinitialized
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+ return cam_queue_deinit(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_util_seq_comp_w_rollover
+ *
+ * DESCRIPTION: utility function to handle sequence number comparison with rollover
+ *
+ * PARAMETERS :
+ * @v1 : first value to be compared
+ * @v2 : second value to be compared
+ *
+ * RETURN : int8_t type of comparison result
+ * >0 -- v1 larger than v2
+ * =0 -- v1 equal to v2
+ * <0 -- v1 smaller than v2
+ *==========================================================================*/
+int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
+ uint32_t v2)
+{
+ int8_t ret = 0;
+
+ /* TODO: need to handle the case if v2 roll over to 0 */
+ if (v1 > v2) {
+ ret = 1;
+ } else if (v1 < v2) {
+ ret = -1;
+ }
+
+ return ret;
+}
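+
+/* Sketch (illustrative only) of a rollover-aware comparison for the TODO
+ * above: interpreting the unsigned difference as signed handles the case
+ * where v2 has wrapped past 0, as long as the two IDs are less than 2^31
+ * apart:
+ *
+ * int32_t diff = (int32_t)(v1 - v2);
+ * return (int8_t)((diff > 0) - (diff < 0));
+ */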
+
+/*===========================================================================
+ * FUNCTION : mm_channel_validate_super_buf
+ *
+ * DESCRIPTION: Validate incoming buffer with existing super buffer.
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int8_t type of validation result
+ * >0 -- Valid frame
+ * =0 -- Cannot validate
+ * <0 -- Invalid frame. Can be freed
+ *==========================================================================*/
+int8_t mm_channel_validate_super_buf(mm_channel_t* ch_obj,
+ mm_channel_queue_t *queue, mm_camera_buf_info_t *buf_info)
+{
+ int8_t ret = 0;
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ /* comp */
+ pthread_mutex_lock(&queue->que.lock);
+ head = &queue->que.head.list;
+ /* walk the queue looking for a node that still expects this frame */
+ pos = head->next;
+ while (pos != head) {
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if ((NULL != super_buf) &&
+ (super_buf->expected_frame) &&
+ (buf_info->frame_idx == super_buf->frame_idx)) {
+ /* This is a good frame; more frames are expected, so keep it. */
+ ret = 1;
+ break;
+ }
+ /* always advance so a NULL node payload cannot stall the loop */
+ pos = pos->next;
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_handle_metadata
+ *
+ * DESCRIPTION: Handle frame matching logic change due to metadata
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_handle_metadata(
+ mm_channel_t* ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf_info)
+{
+
+ int rc = 0 ;
+ mm_stream_t* stream_obj = NULL;
+ stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+ buf_info->stream_id);
+ uint8_t is_prep_snapshot_done_valid = 0;
+ uint8_t is_good_frame_idx_range_valid = 0;
+ int32_t prep_snapshot_done_state = 0;
+ cam_frame_idx_range_t good_frame_idx_range;
+ uint8_t is_crop_1x_found = 0;
+ uint32_t snapshot_stream_id = 0;
+ uint32_t i;
+ /* Set expected frame id to a future frame idx, large enough to wait
+ * for good_frame_idx_range, and small enough to still capture an image */
+ const uint32_t max_future_frame_offset = MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+
+ memset(&good_frame_idx_range, 0, sizeof(good_frame_idx_range));
+
+ if (NULL == stream_obj) {
+ LOGE("Invalid Stream Object for stream_id = %d",
+ buf_info->stream_id);
+ rc = -1;
+ goto end;
+ }
+ if (NULL == stream_obj->stream_info) {
+ LOGE("NULL stream info for stream_id = %d",
+ buf_info->stream_id);
+ rc = -1;
+ goto end;
+ }
+
+ if ((CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) &&
+ ((stream_obj->ch_obj == ch_obj) ||
+ ((stream_obj->linked_stream != NULL) &&
+ (stream_obj->linked_stream->linked_obj == ch_obj)))) {
+ const metadata_buffer_t *metadata;
+ metadata = (const metadata_buffer_t *)buf_info->buf->buffer;
+
+ if (NULL == metadata) {
+ LOGE("NULL metadata buffer for metadata stream");
+ rc = -1;
+ goto end;
+ }
+ LOGL("E , expected frame id: %d", queue->expected_frame_id);
+
+ IF_META_AVAILABLE(const int32_t, p_prep_snapshot_done_state,
+ CAM_INTF_META_PREP_SNAPSHOT_DONE, metadata) {
+ prep_snapshot_done_state = *p_prep_snapshot_done_state;
+ is_prep_snapshot_done_valid = 1;
+ LOGH("prepare snapshot done valid ");
+ }
+ IF_META_AVAILABLE(const cam_frame_idx_range_t, p_good_frame_idx_range,
+ CAM_INTF_META_GOOD_FRAME_IDX_RANGE, metadata) {
+ good_frame_idx_range = *p_good_frame_idx_range;
+ is_good_frame_idx_range_valid = 1;
+ LOGH("good_frame_idx_range : min: %d, max: %d , num frames = %d",
+ good_frame_idx_range.min_frame_idx,
+ good_frame_idx_range.max_frame_idx, good_frame_idx_range.num_led_on_frames);
+ }
+ IF_META_AVAILABLE(const cam_crop_data_t, p_crop_data,
+ CAM_INTF_META_CROP_DATA, metadata) {
+ cam_crop_data_t crop_data = *p_crop_data;
+
+ for (i = 0; i < ARRAY_SIZE(ch_obj->streams); i++) {
+ if (MM_STREAM_STATE_NOTUSED == ch_obj->streams[i].state) {
+ continue;
+ }
+ if (CAM_STREAM_TYPE_SNAPSHOT ==
+ ch_obj->streams[i].stream_info->stream_type) {
+ snapshot_stream_id = ch_obj->streams[i].server_stream_id;
+ break;
+ }
+ }
+
+ for (i=0; i<crop_data.num_of_streams; i++) {
+ if (snapshot_stream_id == crop_data.crop_info[i].stream_id) {
+ if (!crop_data.crop_info[i].crop.left &&
+ !crop_data.crop_info[i].crop.top) {
+ is_crop_1x_found = 1;
+ break;
+ }
+ }
+ }
+ }
+
+ IF_META_AVAILABLE(const cam_buf_divert_info_t, p_divert_info,
+ CAM_INTF_BUF_DIVERT_INFO, metadata) {
+ cam_buf_divert_info_t divert_info = *p_divert_info;
+ if (divert_info.frame_id >= buf_info->frame_idx) {
+ ch_obj->diverted_frame_id = divert_info.frame_id;
+ } else {
+ ch_obj->diverted_frame_id = 0;
+ }
+ }
+
+ if (ch_obj->isZoom1xFrameRequested) {
+ if (is_crop_1x_found) {
+ ch_obj->isZoom1xFrameRequested = 0;
+ queue->expected_frame_id = buf_info->frame_idx + 1;
+ } else {
+ queue->expected_frame_id += max_future_frame_offset;
+ /* Flush unwanted frames */
+ mm_channel_superbuf_flush_matched(ch_obj, queue);
+ }
+ goto end;
+ }
+
+ if (ch_obj->startZSlSnapshotCalled && is_good_frame_idx_range_valid) {
+ LOGI("frameID = %d, expected = %d good_frame_idx = %d",
+ buf_info->frame_idx, queue->expected_frame_id,
+ good_frame_idx_range.min_frame_idx);
+ }
+
+ if (is_prep_snapshot_done_valid) {
+ ch_obj->bWaitForPrepSnapshotDone = 0;
+ if (prep_snapshot_done_state == NEED_FUTURE_FRAME) {
+ queue->expected_frame_id += max_future_frame_offset;
+ LOGI("PreFlash Done. Need Main Flash");
+
+ mm_channel_superbuf_flush(ch_obj,
+ queue, CAM_STREAM_TYPE_DEFAULT);
+
+ ch_obj->needLEDFlash = TRUE;
+ } else {
+ ch_obj->needLEDFlash = FALSE;
+ }
+ }
+ if (is_good_frame_idx_range_valid) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ queue->good_frame_id = good_frame_idx_range.min_frame_idx;
+ if((ch_obj->needLEDFlash == TRUE) && (ch_obj->burstSnapNum > 1)) {
+ queue->led_on_start_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ queue->led_off_start_frame_id =
+ good_frame_idx_range.max_frame_idx;
+ queue->once = 0;
+ queue->led_on_num_frames =
+ good_frame_idx_range.num_led_on_frames;
+ queue->frame_skip_count = good_frame_idx_range.frame_skip_count;
+ LOGD("Need Flash, expected frame id = %d,"
+ " led_on start = %d, led off start = %d, led on frames = %d ",
+ queue->expected_frame_id, queue->led_on_start_frame_id,
+ queue->led_off_start_frame_id, queue->led_on_num_frames);
+ } else {
+ LOGD("No flash, expected frame id = %d ",
+ queue->expected_frame_id);
+ }
+ } else if ((MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX == ch_obj->bracketingState) &&
+ !is_prep_snapshot_done_valid) {
+ /* Flush unwanted frames */
+ mm_channel_superbuf_flush_matched(ch_obj, queue);
+ queue->expected_frame_id += max_future_frame_offset;
+ }
+ if (ch_obj->isFlashBracketingEnabled &&
+ is_good_frame_idx_range_valid) {
+ /* Flash bracketing needs two frames, with & without led flash.
+ * in valid range min frame is with led flash and max frame is
+ * without led flash */
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ /* max frame is without led flash */
+ queue->expected_frame_id_without_led =
+ good_frame_idx_range.max_frame_idx;
+ queue->good_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ } else if (is_good_frame_idx_range_valid) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_ACTIVE;
+ queue->good_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ }
+
+ if (ch_obj->isConfigCapture && is_good_frame_idx_range_valid
+ && (good_frame_idx_range.config_batch_idx < ch_obj->frameConfig.num_batch)) {
+
+ LOGI("Frame Config: Expected ID = %d batch index = %d",
+ good_frame_idx_range.min_frame_idx, good_frame_idx_range.config_batch_idx);
+ ch_obj->capture_frame_id[good_frame_idx_range.config_batch_idx] =
+ good_frame_idx_range.min_frame_idx;
+
+ if (ch_obj->cur_capture_idx == good_frame_idx_range.config_batch_idx) {
+ queue->expected_frame_id =
+ good_frame_idx_range.min_frame_idx;
+ } else {
+ queue->expected_frame_id =
+ ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+ }
+ queue->good_frame_id = queue->expected_frame_id;
+ }
+
+ if ((ch_obj->burstSnapNum > 1) && (ch_obj->needLEDFlash == TRUE)
+ && !ch_obj->isFlashBracketingEnabled
+ && (MM_CHANNEL_BRACKETING_STATE_OFF == ch_obj->bracketingState)
+ && !ch_obj->isConfigCapture) {
+ if((buf_info->frame_idx >= queue->led_off_start_frame_id)
+ && !queue->once) {
+ LOGD("Burst snap num = %d ",
+ ch_obj->burstSnapNum);
+ // Skip frames from LED OFF frame to get a good frame
+ queue->expected_frame_id = queue->led_off_start_frame_id +
+ queue->frame_skip_count;
+ queue->once = 1;
+ ch_obj->stopZslSnapshot = 1;
+ ch_obj->needLEDFlash = FALSE;
+ LOGD("Reached max led on frames = %d , expected id = %d",
+ buf_info->frame_idx, queue->expected_frame_id);
+ }
+ }
+
+ IF_META_AVAILABLE(const cam_low_light_mode_t, low_light_level,
+ CAM_INTF_META_LOW_LIGHT, metadata) {
+ ch_obj->needLowLightZSL = *low_light_level;
+ }
+
+ // For the instant capture case, if AEC settles before expected frame ID from user,
+ // reset the expected frame ID to current frame index.
+ if (queue->attr.user_expected_frame_id > 0) {
+ if (queue->attr.user_expected_frame_id > buf_info->frame_idx) {
+ IF_META_AVAILABLE(const cam_3a_params_t, ae_params,
+ CAM_INTF_META_AEC_INFO, metadata) {
+ if (ae_params->settled) {
+ queue->expected_frame_id = buf_info->frame_idx;
+ // Reset the expected frame ID from HAL to 0
+ queue->attr.user_expected_frame_id = 0;
+ LOGD("AEC settled, reset expected frame ID from user");
+ }
+ }
+ } else {
+ // Reset the expected frame ID from HAL to 0 after
+ // current frame index is greater than expected id.
+ queue->attr.user_expected_frame_id = 0;
+ LOGD("reset expected frame ID from user as it reached the bound");
+ }
+ }
+ }
+end:
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_comp_and_enqueue
+ *
+ * DESCRIPTION: implementation for matching logic for superbuf
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @queue : superbuf queue
+ * @buf_info: new buffer from stream
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_comp_and_enqueue(
+ mm_channel_t* ch_obj,
+ mm_channel_queue_t *queue,
+ mm_camera_buf_info_t *buf_info)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+ uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
+ struct cam_list *last_buf, *insert_before_buf, *last_buf_ptr;
+
+ LOGD("E");
+
+ for (buf_s_idx = 0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+ if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+ break;
+ }
+ }
+
+ if (buf_s_idx == queue->num_streams) {
+ LOGE("buf from stream (%d) not bundled", buf_info->stream_id);
+ return -1;
+ }
+
+ if(buf_info->frame_idx == 0) {
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ if (mm_channel_handle_metadata(ch_obj, queue, buf_info) < 0) {
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return -1;
+ }
+
+ if ((mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+ queue->expected_frame_id) < 0) &&
+ (mm_channel_validate_super_buf(ch_obj, queue, buf_info) <= 0)) {
+ LOGH("incoming buf id(%d) is older than expected buf id(%d), will discard it",
+ buf_info->frame_idx, queue->expected_frame_id);
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ /* comp */
+ pthread_mutex_lock(&queue->que.lock);
+ head = &queue->que.head.list;
+ /* walk the queue from the oldest entry; unmatched superbufs are candidates for bundling */
+ pos = head->next;
+
+ found_super_buf = 0;
+ unmatched_bundles = 0;
+ last_buf = NULL;
+ insert_before_buf = NULL;
+ last_buf_ptr = NULL;
+
+ while (pos != head) {
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+
+ if (NULL != super_buf) {
+ if (super_buf->matched) {
+ /* find a matched super buf, move to next one */
+ pos = pos->next;
+ continue;
+ } else if (( buf_info->frame_idx == super_buf->frame_idx )
+ /*Pick metadata greater than available frameID*/
+ || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+ && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)
+ && (super_buf->frame_idx < buf_info->frame_idx))
+ /*Pick available metadata closest to frameID*/
+ || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)
+ && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+ && (super_buf->unmatched_meta_idx > buf_info->frame_idx))){
+ /* Bundle when the super buffer frame IDs match, OR, in low-priority
+ bundling, when the metadata frame ID is greater than the available
+ super buffer frame ID, OR when the metadata frame is closest to the
+ incoming frame ID. */
+ found_super_buf = 1;
+ /* If we are filling into a 'meta only' superbuf, make sure to reset
+ the super_buf frame_idx so that missing streams in this superbuf
+ are filled as per matching frame id logic. Note that, in low priority
+ queue, only meta frame id need not match (closest suffices) but
+ the other streams in this superbuf should have same frame id. */
+ if (super_buf->unmatched_meta_idx > 0) {
+ super_buf->unmatched_meta_idx = 0;
+ super_buf->frame_idx = buf_info->frame_idx;
+ }
+ break;
+ } else {
+ unmatched_bundles++;
+ if ( NULL == last_buf ) {
+ if ( super_buf->frame_idx < buf_info->frame_idx ) {
+ last_buf = pos;
+ }
+ }
+ if ( NULL == insert_before_buf ) {
+ if ( super_buf->frame_idx > buf_info->frame_idx ) {
+ insert_before_buf = pos;
+ }
+ }
+ pos = pos->next;
+ }
+ }
+ }
+
+ if ( found_super_buf ) {
+ if(super_buf->super_buf[buf_s_idx].frame_idx != 0) {
+ //This can cause frame drop. We are overwriting same memory.
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGW("Warning: frame is already in camera ZSL queue");
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ /*Insert incoming buffer to super buffer*/
+ super_buf->super_buf[buf_s_idx] = *buf_info;
+
+ /* check if superbuf is all matched */
+ super_buf->matched = 1;
+ for (i=0; i < super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx == 0) {
+ super_buf->matched = 0;
+ break;
+ }
+ }
+
+ if (super_buf->matched) {
+ if(ch_obj->isFlashBracketingEnabled) {
+ queue->expected_frame_id =
+ queue->expected_frame_id_without_led;
+ if (buf_info->frame_idx >=
+ queue->expected_frame_id_without_led) {
+ ch_obj->isFlashBracketingEnabled = FALSE;
+ }
+ } else {
+ queue->expected_frame_id = buf_info->frame_idx
+ + queue->attr.post_frame_skip;
+ }
+
+ super_buf->expected_frame = FALSE;
+
+ LOGD("curr = %d, skip = %d , Expected Frame ID: %d",
+ buf_info->frame_idx,
+ queue->attr.post_frame_skip, queue->expected_frame_id);
+
+ queue->match_cnt++;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ /* Any older unmatched buffers need to be released */
+ if ( last_buf ) {
+ while ( last_buf != pos ) {
+ node = member_of(last_buf, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ last_buf = last_buf->next;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ } else {
+ LOGE("Invalid superbuf in queue!");
+ break;
+ }
+ }
+ }
+ }else {
+ if (ch_obj->diverted_frame_id == buf_info->frame_idx) {
+ super_buf->expected_frame = TRUE;
+ ch_obj->diverted_frame_id = 0;
+ }
+ }
+ } else {
+ if ((queue->attr.max_unmatched_frames < unmatched_bundles)
+ && ( NULL == last_buf )) {
+ /* incoming frame is older than the last bundled one */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ } else {
+ last_buf_ptr = last_buf;
+
+ /* Loop to remove unmatched frames */
+ while ((queue->attr.max_unmatched_frames < unmatched_bundles)
+ && (last_buf_ptr != NULL && last_buf_ptr != pos)) {
+ node = member_of(last_buf_ptr, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf && super_buf->expected_frame == FALSE
+ && (&node->list != insert_before_buf)) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ unmatched_bundles--;
+ }
+ last_buf_ptr = last_buf_ptr->next;
+ }
+
+ if (queue->attr.max_unmatched_frames < unmatched_bundles) {
+ node = member_of(last_buf, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ queue->que.size--;
+ cam_list_del_node(&node->list);
+ free(node);
+ free(super_buf);
+ }
+
+ /* insert the new frame at the appropriate position. */
+
+ mm_channel_queue_node_t *new_buf = NULL;
+ cam_node_t* new_node = NULL;
+
+ new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+ new_node = (cam_node_t*)malloc(sizeof(cam_node_t));
+ if (NULL != new_buf && NULL != new_node) {
+ memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+ memset(new_node, 0, sizeof(cam_node_t));
+ new_node->data = (void *)new_buf;
+ new_buf->num_of_bufs = queue->num_streams;
+ new_buf->super_buf[buf_s_idx] = *buf_info;
+ new_buf->frame_idx = buf_info->frame_idx;
+
+ if ((ch_obj->diverted_frame_id == buf_info->frame_idx)
+ || (buf_info->frame_idx == queue->good_frame_id)) {
+ new_buf->expected_frame = TRUE;
+ ch_obj->diverted_frame_id = 0;
+ }
+
+ /* enqueue */
+ if ( insert_before_buf ) {
+ cam_list_insert_before_node(&new_node->list, insert_before_buf);
+ } else {
+ cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+ }
+ queue->que.size++;
+
+ if(queue->num_streams == 1) {
+ new_buf->matched = 1;
+ new_buf->expected_frame = FALSE;
+ queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+ queue->match_cnt++;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ }
+ /* In the low priority queue, this becomes a 'meta only' superbuf. Set
+ unmatched_meta_idx so that upcoming non-meta stream buffers closest
+ to this index can be filled into it. */
+ if ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+ && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)) {
+ new_buf->unmatched_meta_idx = buf_info->frame_idx;
+ }
+ } else {
+ /* No memory */
+ if (NULL != new_buf) {
+ free(new_buf);
+ }
+ if (NULL != new_node) {
+ free(new_node);
+ }
+ /* qbuf the new buf since we cannot enqueue */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGD("X");
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @matched_only : if dequeued buf should be matched
+ * @ch_obj : channel object
+ *
+ * RETURN : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(
+ mm_channel_queue_t * queue,
+ uint8_t matched_only, mm_channel_t *ch_obj)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ head = &queue->que.head.list;
+ pos = head->next;
+ if (pos != head) {
+ /* get the first node */
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if ( (NULL != super_buf) &&
+ (matched_only == TRUE) &&
+ (super_buf->matched == FALSE) ) {
+ /* only matched frames were requested, but this superbuf is not matched;
+ simply set the return ptr to NULL */
+ super_buf = NULL;
+ }
+ if (NULL != super_buf) {
+ /* remove from the queue */
+ cam_list_del_node(&node->list);
+ queue->que.size--;
+ if (super_buf->matched == TRUE) {
+ queue->match_cnt--;
+ if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+ pthread_mutex_lock(&fs_lock);
+ mm_frame_sync_remove(super_buf->frame_idx);
+ pthread_mutex_unlock(&fs_lock);
+ }
+ }
+ free(node);
+ }
+ }
+
+ return super_buf;
+}
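+
+/* Note (illustrative only): callers of this internal helper hold
+ * queue->que.lock themselves, as the public wrappers in this file do:
+ *
+ * pthread_mutex_lock(&queue->que.lock);
+ * super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, ch_obj);
+ * pthread_mutex_unlock(&queue->que.lock);
+ */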
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue_frame_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue based on frame index
+ * from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @frame_idx : frame index to be dequeued
+ *
+ * RETURN : ptr to a node from superbuf queue with matched frame index
+ * : NULL if not found
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+ mm_channel_queue_t * queue, uint32_t frame_idx)
+{
+ cam_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ if (!queue) {
+ LOGE("queue is NULL");
+ return NULL;
+ }
+
+ head = &queue->que.head.list;
+ pos = head->next;
+ LOGL("Searching for match frame %d", frame_idx);
+ while ((pos != head) && (pos != NULL)) {
+ /* get the first node */
+ node = member_of(pos, cam_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (super_buf && super_buf->matched &&
+ (super_buf->frame_idx == frame_idx)) {
+ /* remove from the queue */
+ cam_list_del_node(&node->list);
+ queue->que.size--;
+ queue->match_cnt--;
+ LOGH("Found match frame %d", frame_idx);
+ free(node);
+ break;
+ }
+ else {
+ LOGH("match frame not found %d", frame_idx);
+ super_buf = NULL;
+ }
+ pos = pos->next;
+ }
+ return super_buf;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_dequeue
+ *
+ * DESCRIPTION: dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ * @queue : superbuf queue
+ * @ch_obj : channel object
+ *
+ * RETURN : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+ mm_channel_queue_t * queue, mm_channel_t *ch_obj)
+{
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, ch_obj);
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_bufdone_overflow
+ *
+ * DESCRIPTION: keep superbuf queue no larger than watermark set by upper layer
+ * via channel attribute
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no overflow handling is needed */
+ return 0;
+ }
+
+ LOGD("before match_cnt=%d, water_mark=%d",
+ queue->match_cnt, queue->attr.water_mark);
+ /* bufdone overflowed bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.water_mark) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+ LOGD("after match_cnt=%d, water_mark=%d",
+ queue->match_cnt, queue->attr.water_mark);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_skip
+ *
+ * DESCRIPTION: depending on the look-back configuration in the channel
+ * attributes, unwanted superbufs will be removed from the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no skip is needed */
+ return 0;
+ }
+
+ /* bufdone overflowed bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.look_back) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_flush
+ *
+ * DESCRIPTION: flush the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ * @cam_type: flush only particular type (default flushes all)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue, cam_stream_type_t cam_type)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+ /* bufdone bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE, my_obj);
+ while (super_buf != NULL) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ stream_type = super_buf->super_buf[i].buf->stream_type;
+ if ((CAM_STREAM_TYPE_DEFAULT == cam_type) ||
+ (cam_type == stream_type)) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ }
+ free(super_buf);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE, my_obj);
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_proc_general_cmd
+ *
+ * DESCRIPTION: process general command
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+ mm_camera_generic_cmd_t *p_gen_cmd)
+{
+ LOGD("E");
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->u.gen_cmd = *p_gen_cmd;
+ node->cmd_type = MM_CAMERA_CMD_TYPE_GENERAL;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -1;
+ }
+ LOGD("X");
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_channel_superbuf_flush_matched
+ *
+ * DESCRIPTION: flush matched buffers from the superbuf queue.
+ *
+ * PARAMETERS :
+ * @my_obj : channel object
+ * @queue : superbuf queue
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ /* bufdone bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ while (super_buf != NULL) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_reset
+ *
+ * DESCRIPTION: Reset Frame sync info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_reset() {
+ memset(&fs, 0x0, sizeof(fs));
+ LOGD("Reset Done");
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_register_channel
+ *
+ * DESCRIPTION: Register Channel for frame sync
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj) {
+ // Lock frame sync info
+ pthread_mutex_lock(&fs_lock);
+ if ((fs.num_cam >= MAX_NUM_CAMERA_PER_BUNDLE) || (!ch_obj)) {
+ LOGE("Error!! num cam(%d) is out of range ",
+ fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ if (fs.num_cam == 0) {
+ LOGH("First channel registering!!");
+ mm_frame_sync_reset();
+ }
+ uint8_t i = 0;
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == NULL) {
+ fs.ch_obj[i] = ch_obj;
+ fs.cb[i] = ch_obj->bundle.super_buf_notify_cb;
+ fs.num_cam++;
+ LOGD("DBG_FS index %d", i);
+ break;
+ }
+ }
+ if (i >= MAX_NUM_CAMERA_PER_BUNDLE) {
+ LOGH("X, DBG_FS Cannot register channel!!");
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ LOGH("num_cam %d ", fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_unregister_channel
+ *
+ * DESCRIPTION: un-register Channel for frame sync
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj) {
+ uint8_t i = 0;
+ // Lock frame sync info
+ pthread_mutex_lock(&fs_lock);
+ if (!fs.num_cam || !ch_obj) {
+ LOGH("X, DBG_FS: channel not found !!");
+ // Unlock frame sync info
+ pthread_mutex_unlock(&fs_lock);
+ return -1;
+ }
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == ch_obj) {
+ LOGD("found ch_obj at i (%d) ", i);
+ break;
+ }
+ }
+ if (i < MAX_NUM_CAMERA_PER_BUNDLE) {
+ LOGD("remove channel info ");
+ fs.ch_obj[i] = NULL;
+ fs.cb[i] = NULL;
+ fs.num_cam--;
+ } else {
+ LOGD("DBG_FS Channel not found ");
+ }
+ if (fs.num_cam == 0) {
+ mm_frame_sync_reset();
+ }
+ LOGH("X, fs.num_cam %d", fs.num_cam);
+ pthread_mutex_unlock(&fs_lock);
+ return 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_add
+ *
+ * DESCRIPTION: Add frame info into frame sync nodes
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be added
+ * @ch_obj : channel object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj) {
+
+ LOGD("E, frame id %d ch_obj %p", frame_id, ch_obj);
+ if (!frame_id || !ch_obj) {
+ LOGH("X : Error, cannot add sync frame !!");
+ return -1;
+ }
+
+ int8_t ch_idx = -1;
+ uint8_t i = 0;
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.ch_obj[i] == ch_obj) {
+ ch_idx = i;
+ LOGD("ch id %d ", ch_idx);
+ break;
+ }
+ }
+ if (ch_idx < 0) {
+ LOGH("X : DBG_FS ch not found!!");
+ return -1;
+ }
+ int8_t index = mm_frame_sync_find_frame_index(frame_id);
+ if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+ fs.node[index].frame_valid[ch_idx] = 1;
+ } else if (index < 0) {
+ if (fs.pos >= MM_CAMERA_FRAME_SYNC_NODES) {
+ fs.pos = 0;
+ }
+ index = fs.pos;
+ memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+ fs.pos++;
+ fs.node[index].frame_idx = frame_id;
+ fs.node[index].frame_valid[ch_idx] = 1;
+ if (fs.num_cam == 1) {
+ LOGD("Single camera frame %d , matched ", frame_id);
+ fs.node[index].matched = 1;
+ }
+ }
+ uint8_t frames_valid = 0;
+ if (!fs.node[index].matched) {
+ for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+ if (fs.node[index].frame_valid[i]) {
+ frames_valid++;
+ }
+ }
+ if (frames_valid == fs.num_cam) {
+ fs.node[index].matched = 1;
+ LOGD("dual camera frame %d , matched ",
+ frame_id);
+ }
+ }
+ return 0;
+}
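+
+/* Note (illustrative only): callers in this file guard the frame sync state
+ * with fs_lock when adding entries, e.g. in
+ * mm_channel_superbuf_comp_and_enqueue:
+ *
+ * pthread_mutex_lock(&fs_lock);
+ * mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+ * pthread_mutex_unlock(&fs_lock);
+ */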
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_remove
+ *
+ * DESCRIPTION: Remove frame info from frame sync nodes
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be removed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_remove(uint32_t frame_id) {
+ int8_t index = -1;
+
+ LOGD("E, frame_id %d", frame_id);
+ if (!frame_id) {
+ LOGE("X, DBG_FS frame id invalid");
+ return -1;
+ }
+
+ index = mm_frame_sync_find_frame_index(frame_id);
+ if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+ LOGD("Removing sync frame %d", frame_id);
+ memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+ }
+ LOGD("X ");
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_find_matched
+ *
+ * DESCRIPTION: Find a matched sync frame from the node array
+ *
+ * PARAMETERS :
+ * @oldest : If enabled, find the oldest matched frame;
+ * if not enabled, return the first matched frame found
+ *
+ * RETURN : uint32_t type of status
+ * 0 -- if no matched frame is found
+ * frame index -- if a matched frame is found
+ *==========================================================================*/
+uint32_t mm_frame_sync_find_matched(uint8_t oldest) {
+ LOGH("E, oldest %d ", oldest);
+ uint8_t i = 0;
+ uint32_t frame_idx = 0;
+ uint32_t curr_frame_idx = 0;
+ for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+ if (fs.node[i].matched) {
+ curr_frame_idx = fs.node[i].frame_idx;
+ if (!frame_idx) {
+ frame_idx = curr_frame_idx;
+ }
+ if (!oldest) {
+ break;
+ } else if (frame_idx > curr_frame_idx) {
+ frame_idx = curr_frame_idx;
+ }
+ }
+ }
+ LOGH("X, oldest %d frame idx %d", oldest, frame_idx);
+ return frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_find_frame_index
+ *
+ * DESCRIPTION: Find sync frame index if present
+ *
+ * PARAMETERS :
+ * @frame_id : frame id to be searched
+ *
+ * RETURN : int8_t type of status
+ * -1 -- If desired frame not found
+ * index: node array index if frame is found
+ *==========================================================================*/
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id) {
+
+ LOGD("E, frame_id %d", frame_id);
+ int8_t index = -1, i = 0;
+ for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+ if (fs.node[i].frame_idx == frame_id) {
+ index = i;
+ break;
+ }
+ }
+ LOGD("X index :%d", index);
+ return index;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_lock_queues
+ *
+ * DESCRIPTION: Lock all channel queues present in node info
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_lock_queues() {
+ uint8_t j = 0;
+ LOGD("E ");
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue) {
+ pthread_mutex_lock(&ch_queue->que.lock);
+ LOGL("Done locking fs.ch_obj[%d] ", j);
+ }
+ }
+ }
+ pthread_mutex_lock(&fs_lock);
+ LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION : mm_frame_sync_unlock_queues
+ *
+ * DESCRIPTION: Unlock all channel queues
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_frame_sync_unlock_queues() {
+ // Unlock all queues
+ uint8_t j = 0;
+ LOGD("E ");
+ pthread_mutex_unlock(&fs_lock);
+ LOGL("Done unlocking fs ");
+ for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+ if (fs.ch_obj[j]) {
+ mm_channel_queue_t *ch_queue =
+ &fs.ch_obj[j]->bundle.superbuf_queue;
+ if (ch_queue) {
+ pthread_mutex_unlock(&ch_queue->que.lock);
+ LOGL("Done unlocking fs.ch_obj[%d] ", j);
+ }
+ }
+ }
+ LOGD("X ");
+}
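+
+/* Usage sketch (illustrative only): a bundle-wide operation would bracket its
+ * traversal of the registered channel queues with the pair above:
+ *
+ * mm_frame_sync_lock_queues();
+ * // ... inspect or dequeue from fs.ch_obj[i]->bundle.superbuf_queue ...
+ * mm_frame_sync_unlock_queues();
+ */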
+
+/*===========================================================================
+ * FUNCTION : mm_channel_node_qbuf
+ *
+ * DESCRIPTION: qbuf all buffers in a node
+ *
+ * PARAMETERS :
+ * @ch_obj : Channel info
+ * @node : node to qbuf
+ *
+ * RETURN : None
+ *==========================================================================*/
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node) {
+ uint8_t i;
+ if (!ch_obj || !node) {
+ return;
+ }
+ for (i = 0; i < node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ return;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..167e7fe
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,2052 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <linux/media.h>
+#include <media/msm_cam_sensor.h>
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera_sock.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+#define CAM_SENSOR_FACING_MASK (1U<<16) // bit 16 (counting from 0) tells whether it is a BACK or FRONT camera
+#define CAM_SENSOR_TYPE_MASK (1U<<24) // bit 24 (counting from 0) tells whether it is a MAIN or AUX camera
+#define CAM_SENSOR_FORMAT_MASK (1U<<25) // bit 25 (counting from 0) tells whether it is a YUV sensor or not
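+/* These masks are applied to media_entity_desc.flags when sensor subdevs
+ * are enumerated in get_sensor_info() below. */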
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_generate_handler
+ *
+ * DESCRIPTION: utility function to generate handler for camera/channel/stream
+ *
+ * PARAMETERS :
+ * @index: index of the object to have handler
+ *
+ * RETURN : uint32_t type of handle that uniquely identify the object
+ *==========================================================================*/
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+ uint32_t handler = 0;
+ pthread_mutex_lock(&g_handler_lock);
+ g_handler_history_count++;
+ if (0 == g_handler_history_count) {
+ g_handler_history_count++;
+ }
+ handler = g_handler_history_count;
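+    /* Handle layout: bits 23..8 carry the non-zero history count,
+     * bits 7..0 carry the object index. */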
+ handler = (handler<<8) | index;
+ pthread_mutex_unlock(&g_handler_lock);
+ return handler;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_index_by_handler
+ *
+ * DESCRIPTION: utility function to get index from handle
+ *
+ * PARAMETERS :
+ * @handler: object handle
+ *
+ * RETURN : uint8_t type of index derived from handle
+ *==========================================================================*/
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
+ return (handler&0x000000ff);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_dev_name
+ *
+ * DESCRIPTION: utility function to get device name from camera handle
+ *
+ * PARAMETERS :
+ * @cam_handle: camera handle
+ *
+ * RETURN : char ptr to the device name stored in global variable
+ * NOTE : caller should not free the char ptr
+ *==========================================================================*/
+const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
+{
+ char *dev_name = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+ if(cam_idx < MM_CAMERA_MAX_NUM_SENSORS) {
+ dev_name = g_cam_ctrl.video_dev_name[cam_idx];
+ }
+ return dev_name;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_util_get_camera_by_handler
+ *
+ * DESCRIPTION: utility function to get camera object from camera handle
+ *
+ * PARAMETERS :
+ * @cam_handle: camera handle
+ *
+ * RETURN : ptr to the camera object stored in global variable
+ * NOTE : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
+{
+ mm_camera_obj_t *cam_obj = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+
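+    /* The full handle (history count + index) must match my_hdl so that a
+     * stale handle left over from a closed camera whose index was reused
+     * is rejected. */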
+ if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
+ (NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+ (cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+ cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+ }
+ return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_query_capability(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E: camera_handler = %d ", camera_handle);
+
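+    /* Hand-over-hand locking used by all interface wrappers below: hold
+     * g_intf_lock only long enough to look up the camera object and take
+     * its per-camera cam_lock, then drop g_intf_lock before calling into
+     * mm_camera so other cameras are not blocked for the duration of the
+     * call. */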
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_query_capability(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_parms(uint32_t camera_handle,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_parms(my_obj, parms);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @parms : ptr to a param struct to be fetched from the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be fetched from the server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_parms(uint32_t camera_handle,
+ parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_parms(my_obj, parms);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will always assume there will
+ * be an auto_focus event following up.
+ *==========================================================================*/
+static int32_t mm_camera_intf_do_auto_focus(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_do_auto_focus(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_auto_focus(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_cancel_auto_focus(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @do_af_flag : flag indicating if AF is needed
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_prepare_snapshot(uint32_t camera_handle,
+ int32_t do_af_flag)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_prepare_snapshot(my_obj, do_af_flag);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_flush(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_close
+ *
+ * DESCRIPTION: close a camera by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_close(uint32_t camera_handle)
+{
+ int32_t rc = -1;
+ uint8_t cam_idx = camera_handle & 0x00ff;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E: camera_handler = %d ", camera_handle);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if (my_obj){
+ my_obj->ref_count--;
+
+ if(my_obj->ref_count > 0) {
+ /* still have reference to obj, return here */
+ LOGD("ref_count=%d\n", my_obj->ref_count);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = 0;
+ } else {
+            /* need to close the camera here as no other reference exists;
+             * first clear g_cam_ctrl's reference to cam_obj */
+ g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_close(my_obj);
+ pthread_mutex_destroy(&my_obj->cam_lock);
+ free(my_obj);
+ }
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @attr : bundle attribute of the channel if needed
+ * @channel_cb : callback function for bundle data notify
+ * @userdata : user data ptr
+ *
+ * RETURN : uint32_t type of channel handle
+ * 0 -- invalid channel handle, meaning the op failed
+ * >0 -- successfully added a channel with a valid handle
+ * NOTE : if no bundle data notify is needed, meaning each stream in the
+ * channel will have its own stream data notify callback, then
+ * attr, channel_cb, and userdata can be NULL. In this case,
+ * no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ uint32_t ch_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d", camera_handle);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X ch_id = %d", ch_id);
+ return ch_id;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : all streams in the channel should be stopped already before
+ * this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ch_id = %d", ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_del_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @bundle_info : bundle info to be filled in
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_bundle_info(uint32_t camera_handle,
+ uint32_t ch_id,
+ cam_bundle_config_t *bundle_info)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ch_id = %d", ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_bundle_info(my_obj, ch_id, bundle_info);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_register_event_notify
+ *
+ * DESCRIPTION: register for event notify
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @evt_cb : callback for event notify
+ * @user_data : user data ptr
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_event_notify(uint32_t camera_handle,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E ");
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("E rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf : buf ptr to be enqueued
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_qbuf(my_obj, ch_id, buf);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X evt_type = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_queued_buf_count
+ *
+ * DESCRIPTION: returns the queued buffer count
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ *
+ * RETURN : int32_t - queued buffer count
+ *
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_queued_buf_count(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_queued_buf_count(my_obj, ch_id, stream_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X queued buffer count = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_link_stream
+ *
+ * DESCRIPTION: link a stream into a new channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream id
+ * @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN : int32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+static int32_t mm_camera_intf_link_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint32_t linked_ch_id)
+{
+ uint32_t id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %u ch_id = %u",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ id = mm_camera_link_stream(my_obj, ch_id, stream_id, linked_ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X stream_id = %u", stream_id);
+ return (int32_t)id;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : uint32_t type of stream handle
+ * 0 -- invalid stream handle, meaning the op failed
+ * >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ uint32_t stream_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d ch_id = %d",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ stream_id = mm_camera_add_stream(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X stream_id = %d", stream_id);
+ return stream_id;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d ch_id = %d stream_id = %d",
+ camera_handle, ch_id, stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_config_stream(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %d, ch_id = %d,stream_id = %d",
+ camera_handle, ch_id, stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("mm_camera_intf_config_stream stream_id = %d",stream_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_channel(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_stop_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ * frames from superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf : request buffer info
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_request_super_buf(uint32_t camera_handle,
+ uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+ int32_t rc = -1;
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj && buf) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_request_super_buf (my_obj, ch_id, buf);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain number
+ * of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_super_buf_request(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @frame_idx : frame index
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush_super_buf_queue(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t frame_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_flush_super_buf_queue(my_obj, ch_id, frame_idx);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_start_zsl_snapshot
+ *
+ * DESCRIPTION: Starts zsl snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_zsl_snapshot(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_zsl_snapshot_ch(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_stop_zsl_snapshot
+ *
+ * DESCRIPTION: Stops zsl snapshot
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_zsl_snapshot(uint32_t camera_handle,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_stop_zsl_snapshot_ch(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_configure_notify_mode
+ *
+ * DESCRIPTION: Configures channel notification mode
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @notify_mode : notification mode
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_configure_notify_mode(uint32_t camera_handle,
+ uint32_t ch_id,
+ mm_camera_super_buf_notify_mode_t notify_mode)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_config_channel_notify(my_obj, ch_id, notify_mode);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_buf(uint32_t camera_handle,
+ uint8_t buf_type,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_buf(my_obj, buf_type, fd, size);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_intf_map_bufs(uint32_t camera_handle,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_bufs(my_obj, buf_map_list);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @buf_type : type of buffer to be unmapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_CAPABILITY
+ * CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ * CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_buf(uint32_t camera_handle,
+ uint8_t buf_type)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_unmap_buf(my_obj, buf_type);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+ camera_handle, ch_id, s_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_stream_parms(my_obj, ch_id, s_id, parms);
+    } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @parms : ptr to a param struct to be fetched from the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Parameters to be fetched from the server are already
+ * filled in by upper layer caller. After this call, corresponding
+ * fields of requested parameters will be filled in by server with
+ * detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_stream_parms(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ cam_stream_parm_buffer_t *parms)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+ camera_handle, ch_id, s_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_stream_parms(my_obj, ch_id, s_id, parms);
+    } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ * plane_idx = -1; otherwise, plane_idx is the
+ * index to plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_buf(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+ camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+ buf_type, buf_idx, plane_idx,
+ fd, size);
+    } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_bufs(uint32_t camera_handle,
+ uint32_t ch_id,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d",
+ camera_handle, ch_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_map_stream_bufs(my_obj, ch_id, buf_map_list);
+    } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @s_id : stream handle
+ * @buf_type : type of buffer to be unmapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @buf_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ * plane_idx = -1; otherwise, plane_idx is the
+ * index to plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_stream_buf(uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ uint8_t buf_type,
+ uint32_t buf_idx,
+ int32_t plane_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+ camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_unmap_stream_buf(my_obj, ch_id, stream_id,
+ buf_type, buf_idx, plane_idx);
+    } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_get_session_id
+ *
+ * DESCRIPTION: retrieve the session ID from the kernel for this HWI instance
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @sessionid: session id to be retrieved from server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get a valid session id.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_session_id(uint32_t camera_handle,
+ uint32_t* sessionid)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_session_id(my_obj, sessionid);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_sync_related_sensors
+ *
+ * DESCRIPTION: send the related cam info to the backend to establish sensor linking
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @related_cam_info: pointer to the related cam info to be sent to the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : if this call succeeds, we will get linking established in back end
+ *==========================================================================*/
+static int32_t mm_camera_intf_sync_related_sensors(uint32_t camera_handle,
+ cam_sync_related_sensors_event_info_t* related_cam_info)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_sync_related_sensors(my_obj, related_cam_info);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : get_sensor_info
+ *
+ * DESCRIPTION: get sensor info like facing(back/front) and mount angle
+ *
+ * PARAMETERS :
+ *
+ * RETURN :
+ *==========================================================================*/
+void get_sensor_info()
+{
+ int rc = 0;
+ int dev_fd = -1;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ size_t num_cameras = 0;
+
+ LOGD("E");
+ while (1) {
+ char dev_name[32];
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ memset(&mdev_info, 0, sizeof(mdev_info));
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ num_cameras = 0;
+ break;
+ }
+
+ if(strncmp(mdev_info.model, MSM_CONFIGURATION_NAME, sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ unsigned int num_entities = 1;
+ while (1) {
+ struct media_entity_desc entity;
+ uint32_t temp;
+ uint32_t mount_angle;
+ uint32_t facing;
+ int32_t type = 0;
+ uint8_t is_yuv;
+
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+ entity.group_id == MSM_CAMERA_SUBDEV_SENSOR) {
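+                /* Decode sensor info from entity.flags: bits 15..8 hold the
+                 * mount angle in multiples of 90 degrees, bit 16 the facing,
+                 * bit 24 MAIN vs AUX and bit 25 YUV vs RAW (see the
+                 * CAM_SENSOR_*_MASK defines above). */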
+ temp = entity.flags >> 8;
+ mount_angle = (temp & 0xFF) * 90;
+ facing = ((entity.flags & CAM_SENSOR_FACING_MASK) ?
+ CAMERA_FACING_FRONT:CAMERA_FACING_BACK);
+ /* TODO: Need to revisit this logic if front AUX is available. */
+ if ((unsigned int)facing == CAMERA_FACING_FRONT) {
+ type = CAM_TYPE_STANDALONE;
+ } else if (entity.flags & CAM_SENSOR_TYPE_MASK) {
+ type = CAM_TYPE_AUX;
+ } else {
+ type = CAM_TYPE_MAIN;
+ }
+ is_yuv = ((entity.flags & CAM_SENSOR_FORMAT_MASK) ?
+ CAM_SENSOR_YUV:CAM_SENSOR_RAW);
+ LOGL("index = %u flag = %x mount_angle = %u "
+ "facing = %u type: %u is_yuv = %u\n",
+ (unsigned int)num_cameras, (unsigned int)temp,
+ (unsigned int)mount_angle, (unsigned int)facing,
+ (unsigned int)type, (uint8_t)is_yuv);
+ g_cam_ctrl.info[num_cameras].facing = (int)facing;
+ g_cam_ctrl.info[num_cameras].orientation = (int)mount_angle;
+ g_cam_ctrl.cam_type[num_cameras] = type;
+ g_cam_ctrl.is_yuv[num_cameras] = is_yuv;
+ LOGD("dev_info[id=%zu,name='%s']\n",
+ num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+ num_cameras++;
+ continue;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ }
+
+ LOGD("num_cameras=%d\n", g_cam_ctrl.num_cam);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : sort_camera_info
+ *
+ * DESCRIPTION: sort camera info so that back camera indices come before front camera indices
+ *
+ * PARAMETERS : number of cameras
+ *
+ * RETURN :
+ *==========================================================================*/
+void sort_camera_info(int num_cam)
+{
+ int idx = 0, i;
+ struct camera_info temp_info[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_type_t temp_type[MM_CAMERA_MAX_NUM_SENSORS];
+ cam_sync_mode_t temp_mode[MM_CAMERA_MAX_NUM_SENSORS];
+ uint8_t temp_is_yuv[MM_CAMERA_MAX_NUM_SENSORS];
+ char temp_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+
+ memset(temp_info, 0, sizeof(temp_info));
+ memset(temp_dev_name, 0, sizeof(temp_dev_name));
+ memset(temp_type, 0, sizeof(temp_type));
+ memset(temp_mode, 0, sizeof(temp_mode));
+ memset(temp_is_yuv, 0, sizeof(temp_is_yuv));
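+    /* Resulting order: back main, front main, back aux, front aux; each
+     * group is gathered into the temp arrays below and then copied back. */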
+
+ /* TODO: Need to revisit this logic if front AUX is available. */
+
+ /* firstly save the main back cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+ (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Back Main Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ /* then save the front cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+ (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Front Main Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ /* save the aux back cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+ (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Back Aux Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ //TODO: Need to revisit this logic if front AUX is available.
+ /* save the aux front cameras info*/
+ for (i = 0; i < num_cam; i++) {
+ if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+ (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX)) {
+ temp_info[idx] = g_cam_ctrl.info[i];
+ temp_type[idx] = g_cam_ctrl.cam_type[i];
+ temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+ temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+ LOGD("Found Front Aux Camera: i: %d idx: %d", i, idx);
+ memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+ MM_CAMERA_DEV_NAME_LEN);
+ }
+ }
+
+ if (idx <= num_cam) {
+ memcpy(g_cam_ctrl.info, temp_info, sizeof(temp_info));
+ memcpy(g_cam_ctrl.cam_type, temp_type, sizeof(temp_type));
+ memcpy(g_cam_ctrl.cam_mode, temp_mode, sizeof(temp_mode));
+ memcpy(g_cam_ctrl.is_yuv, temp_is_yuv, sizeof(temp_is_yuv));
+ memcpy(g_cam_ctrl.video_dev_name, temp_dev_name, sizeof(temp_dev_name));
+        // Set num_cam based on the cameras finally exposed via dual/aux properties.
+ g_cam_ctrl.num_cam = idx;
+ for (i = 0; i < idx; i++) {
+ LOGI("Camera id: %d facing: %d, type: %d is_yuv: %d",
+ i, g_cam_ctrl.info[i].facing, g_cam_ctrl.cam_type[i], g_cam_ctrl.is_yuv[i]);
+ }
+ }
+ LOGI("Number of cameras %d sorted %d", num_cam, idx);
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION : get_num_of_cameras
+ *
+ * DESCRIPTION: get number of cameras
+ *
+ * PARAMETERS :
+ *
+ * RETURN : number of cameras supported
+ *==========================================================================*/
+uint8_t get_num_of_cameras()
+{
+ int rc = 0;
+ int dev_fd = -1;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ int8_t num_cameras = 0;
+ char subdev_name[32];
+ int32_t sd_fd = -1;
+ struct sensor_init_cfg_data cfg;
+ char prop[PROPERTY_VALUE_MAX];
+
+ LOGD("E");
+
+ property_get("vold.decrypt", prop, "0");
+ int decrypt = atoi(prop);
+ if (decrypt == 1)
+ return 0;
+
+ /* lock the mutex */
+ pthread_mutex_lock(&g_intf_lock);
+
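+    /* Enumeration happens in three steps: (1) walk the /dev/media* nodes to
+     * find the sensor_init subdev on the MSM_CONFIGURATION_NAME media
+     * device, (2) issue CFG_SINIT_PROBE_WAIT_DONE and block until sensor
+     * probing completes, (3) walk /dev/media* again and collect the camera
+     * video device nodes. */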
+ while (1) {
+ uint32_t num_entities = 1U;
+ char dev_name[32];
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ break;
+ }
+
+ if (strncmp(mdev_info.model, MSM_CONFIGURATION_NAME,
+ sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ LOGD("entity id %d", entity.id);
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities");
+ rc = 0;
+ break;
+ }
+ LOGD("entity name %s type %d group id %d",
+ entity.name, entity.type, entity.group_id);
+ if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+ entity.group_id == MSM_CAMERA_SUBDEV_SENSOR_INIT) {
+                snprintf(subdev_name, sizeof(subdev_name), "/dev/%s", entity.name);
+ break;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ }
+
+ /* Open sensor_init subdev */
+ sd_fd = open(subdev_name, O_RDWR);
+    if (sd_fd < 0) {
+        LOGE("Open sensor_init subdev failed");
+        /* drop the interface lock taken above before bailing out */
+        pthread_mutex_unlock(&g_intf_lock);
+        return FALSE;
+    }
+
+ cfg.cfgtype = CFG_SINIT_PROBE_WAIT_DONE;
+ cfg.cfg.setting = NULL;
+ if (ioctl(sd_fd, VIDIOC_MSM_SENSOR_INIT_CFG, &cfg) < 0) {
+ LOGE("failed");
+ }
+ close(sd_fd);
+ dev_fd = -1;
+
+
+ num_media_devices = 0;
+ while (1) {
+ uint32_t num_entities = 1U;
+ char dev_name[32];
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ LOGD("Done discovering media devices: %s\n", strerror(errno));
+ break;
+ }
+ num_media_devices++;
+ memset(&mdev_info, 0, sizeof(mdev_info));
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ dev_fd = -1;
+ num_cameras = 0;
+ break;
+ }
+
+ if(strncmp(mdev_info.model, MSM_CAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+ close(dev_fd);
+ dev_fd = -1;
+ continue;
+ }
+
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ LOGD("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+                strlcpy(g_cam_ctrl.video_dev_name[num_cameras],
+                         entity.name, sizeof(g_cam_ctrl.video_dev_name[num_cameras]));
+ LOGI("dev_info[id=%d,name='%s']\n",
+ (int)num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+ num_cameras++;
+ break;
+ }
+ }
+ close(dev_fd);
+ dev_fd = -1;
+ if (num_cameras >= MM_CAMERA_MAX_NUM_SENSORS) {
+ LOGW("Maximum number of camera reached %d", num_cameras);
+ break;
+ }
+ }
+ g_cam_ctrl.num_cam = num_cameras;
+
+ get_sensor_info();
+ sort_camera_info(g_cam_ctrl.num_cam);
+ /* unlock the mutex */
+ pthread_mutex_unlock(&g_intf_lock);
+ LOGI("num_cameras=%d\n", (int)g_cam_ctrl.num_cam);
+ return(uint8_t)g_cam_ctrl.num_cam;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_process_advanced_capture
+ *
+ * DESCRIPTION: Configures channel advanced capture mode
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @type : advanced capture type
+ * @ch_id : channel handle
+ * @trigger : 1 for start and 0 for cancel/stop
+ * @in_value : input capture configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_process_advanced_capture(uint32_t camera_handle,
+ uint32_t ch_id, mm_camera_advanced_capture_t type,
+ int8_t trigger, void *in_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E camera_handler = %d,ch_id = %d",
+ camera_handle, ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_channel_advanced_capture(my_obj, ch_id, type,
+ (uint32_t)trigger, in_value);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ LOGD("X ");
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_intf_register_stream_buf_cb
+ *
+ * DESCRIPTION: Register special callback for stream buffer
+ *
+ * PARAMETERS :
+ * @camera_handle: camera handle
+ * @ch_id : channel handle
+ * @stream_id : stream handle
+ * @buf_cb : callback function
+ * @cb_type : SYNC/ASYNC callback type
+ * @userdata : userdata pointer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_stream_buf_cb(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+ mm_camera_stream_cb_type cb_type, void *userdata)
+{
+ int32_t rc = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ LOGD("E handle = %u ch_id = %u",
+ camera_handle, ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_reg_stream_buf_cb(my_obj, ch_id, stream_id,
+ buf_cb, cb_type, userdata);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return (int32_t)rc;
+}
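+
+/*
+ * Usage sketch (illustrative only, kept out of the build): registering a
+ * SYNC buffer callback through the ops table. The ops entry is assumed to
+ * mirror the wrapper above, and the callback prototype is assumed to be
+ * (mm_camera_super_buf_t *, void *) based on how the stream layer invokes
+ * it; example_stream_cb and example_register_cb are hypothetical names.
+ */
+#if 0
+static void example_stream_cb(mm_camera_super_buf_t *bufs, void *userdata)
+{
+    (void)bufs;
+    (void)userdata;
+    /* consume the frame here; SYNC callbacks run on the poll thread */
+}
+
+static int32_t example_register_cb(mm_camera_vtbl_t *vtbl,
+        uint32_t ch_id, uint32_t stream_id)
+{
+    return vtbl->ops->register_stream_buf_cb(vtbl->camera_handle,
+            ch_id, stream_id, example_stream_cb,
+            MM_CAMERA_STREAM_CB_TYPE_SYNC, NULL);
+}
+#endif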
+
+struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType)
+{
+ *pCamType = g_cam_ctrl.cam_type[camera_id];
+ return &g_cam_ctrl.info[camera_id];
+}
+
+uint8_t is_yuv_sensor(uint32_t camera_id)
+{
+ return g_cam_ctrl.is_yuv[camera_id];
+}
+
+/* camera ops v-table */
+static mm_camera_ops_t mm_camera_ops = {
+ .query_capability = mm_camera_intf_query_capability,
+ .register_event_notify = mm_camera_intf_register_event_notify,
+ .close_camera = mm_camera_intf_close,
+ .set_parms = mm_camera_intf_set_parms,
+ .get_parms = mm_camera_intf_get_parms,
+ .do_auto_focus = mm_camera_intf_do_auto_focus,
+ .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
+ .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+ .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
+ .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
+ .map_buf = mm_camera_intf_map_buf,
+ .map_bufs = mm_camera_intf_map_bufs,
+ .unmap_buf = mm_camera_intf_unmap_buf,
+ .add_channel = mm_camera_intf_add_channel,
+ .delete_channel = mm_camera_intf_del_channel,
+ .get_bundle_info = mm_camera_intf_get_bundle_info,
+ .add_stream = mm_camera_intf_add_stream,
+ .link_stream = mm_camera_intf_link_stream,
+ .delete_stream = mm_camera_intf_del_stream,
+ .config_stream = mm_camera_intf_config_stream,
+ .qbuf = mm_camera_intf_qbuf,
+ .get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
+ .map_stream_buf = mm_camera_intf_map_stream_buf,
+ .map_stream_bufs = mm_camera_intf_map_stream_bufs,
+ .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
+ .set_stream_parms = mm_camera_intf_set_stream_parms,
+ .get_stream_parms = mm_camera_intf_get_stream_parms,
+ .start_channel = mm_camera_intf_start_channel,
+ .stop_channel = mm_camera_intf_stop_channel,
+ .request_super_buf = mm_camera_intf_request_super_buf,
+ .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+ .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
+ .configure_notify_mode = mm_camera_intf_configure_notify_mode,
+ .process_advanced_capture = mm_camera_intf_process_advanced_capture,
+ .get_session_id = mm_camera_intf_get_session_id,
+ .sync_related_sensors = mm_camera_intf_sync_related_sensors,
+ .flush = mm_camera_intf_flush,
+ .register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb
+};
+
+/*===========================================================================
+ * FUNCTION : camera_open
+ *
+ * DESCRIPTION: open a camera by camera index
+ *
+ * PARAMETERS :
+ *   @camera_idx  : camera index; must be within the range 0 to num_of_cameras - 1
+ * @camera_vtbl : ptr to a virtual table containing camera handle and operation table.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * non-zero error code -- failure
+ *==========================================================================*/
+int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
+{
+ int32_t rc = 0;
+ mm_camera_obj_t *cam_obj = NULL;
+
+#ifdef QCAMERA_REDEFINE_LOG
+ mm_camera_set_dbg_log_properties();
+#endif
+
+ LOGD("E camera_idx = %d\n", camera_idx);
+ if (camera_idx >= g_cam_ctrl.num_cam) {
+ LOGE("Invalid camera_idx (%d)", camera_idx);
+ return -EINVAL;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ /* opened already */
+ if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+ /* Add reference */
+ g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+ pthread_mutex_unlock(&g_intf_lock);
+        LOGD("opened already\n");
+ *camera_vtbl = &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
+ return rc;
+ }
+
+ cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+ if(NULL == cam_obj) {
+ pthread_mutex_unlock(&g_intf_lock);
+ LOGE("no mem");
+        return -ENOMEM;
+ }
+
+ /* initialize camera obj */
+ memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+ cam_obj->ctrl_fd = -1;
+ cam_obj->ds_fd = -1;
+ cam_obj->ref_count++;
+ cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+ cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+ cam_obj->vtbl.ops = &mm_camera_ops;
+ pthread_mutex_init(&cam_obj->cam_lock, NULL);
+    /* unlock the global interface lock; otherwise, in the dual camera use
+     * case, this open would block operations on another opened camera obj */
+ pthread_mutex_lock(&cam_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+
+ rc = mm_camera_open(cam_obj);
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (rc != 0) {
+ LOGE("mm_camera_open err = %d", rc);
+ pthread_mutex_destroy(&cam_obj->cam_lock);
+ g_cam_ctrl.cam_obj[camera_idx] = NULL;
+ free(cam_obj);
+ cam_obj = NULL;
+ pthread_mutex_unlock(&g_intf_lock);
+ *camera_vtbl = NULL;
+ return rc;
+ } else {
+        LOGD("Open succeeded\n");
+ g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+ pthread_mutex_unlock(&g_intf_lock);
+ *camera_vtbl = &cam_obj->vtbl;
+ return 0;
+ }
+}
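+
+/*
+ * Usage sketch (illustrative only, kept out of the build): the expected
+ * open/query/close flow through the vtable returned above. query_capability
+ * and close_camera are assumed to take just the camera handle, matching the
+ * wrappers referenced in the ops table; example_open_query_close is a
+ * hypothetical name.
+ */
+#if 0
+static int32_t example_open_query_close(uint8_t cam_idx)
+{
+    mm_camera_vtbl_t *vtbl = NULL;
+    int32_t rc = camera_open(cam_idx, &vtbl);
+    if ((rc != 0) || (NULL == vtbl)) {
+        return rc;
+    }
+    /* every further call goes through the ops table, keyed by the handle */
+    rc = vtbl->ops->query_capability(vtbl->camera_handle);
+    vtbl->ops->close_camera(vtbl->camera_handle);
+    return rc;
+}
+#endif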
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100644
index 0000000..85a5d3b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,294 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_create
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ * @cam_id : camera ID
+ * @sock_type: socket type, TCP/UDP
+ *
+ * RETURN : fd related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+ int socket_fd;
+ mm_camera_sock_addr_t sock_addr;
+ int sktype;
+ int rc;
+
+ switch (sock_type)
+ {
+ case MM_CAMERA_SOCK_TYPE_UDP:
+ sktype = SOCK_DGRAM;
+ break;
+ case MM_CAMERA_SOCK_TYPE_TCP:
+ sktype = SOCK_STREAM;
+ break;
+ default:
+ LOGE("unknown socket type =%d", sock_type);
+ return -1;
+ }
+ socket_fd = socket(AF_UNIX, sktype, 0);
+ if (socket_fd < 0) {
+    LOGE("error creating socket, fd=%d", socket_fd);
+ return socket_fd;
+ }
+
+ memset(&sock_addr, 0, sizeof(sock_addr));
+ sock_addr.addr_un.sun_family = AF_UNIX;
+ snprintf(sock_addr.addr_un.sun_path,
+ UNIX_PATH_MAX, QCAMERA_DUMP_FRM_LOCATION"cam_socket%d", cam_id);
+ rc = connect(socket_fd, &sock_addr.addr, sizeof(sock_addr.addr_un));
+ if (0 != rc) {
+    LOGE("connect on socket_fd=%d failed: %s", socket_fd, strerror(errno));
+    close(socket_fd);
+    socket_fd = -1;
+ }
+
+ LOGD("socket_fd=%d %s", socket_fd,
+ sock_addr.addr_un.sun_path);
+ return socket_fd;
+}
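+
+/*
+ * Usage sketch (illustrative only, kept out of the build): create the
+ * per-camera domain socket, exchange packets with the daemon, then close it.
+ * example_socket_session is a hypothetical name.
+ */
+#if 0
+static void example_socket_session(int cam_id)
+{
+    int fd = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
+    if (fd >= 0) {
+        /* ... send/receive mapping packets with the daemon here ... */
+        mm_camera_socket_close(fd);
+    }
+}
+#endif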
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_close
+ *
+ * DESCRIPTION: close domain socket by its fd
+ * @fd : file descriptor for the domain socket to be closed
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+ if (fd >= 0) {
+ close(fd);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_sendmsg
+ *
+ * DESCRIPTION: send msg through domain socket
+ * @fd : socket fd
+ * @msg : pointer to msg to be sent over domain socket
+ *   @sendfd : file descriptor to be sent (pass a negative value if none)
+ *
+ * RETURN : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+
+ if (msg == NULL) {
+ LOGD("msg is NULL");
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ LOGD("iov_len=%llu",
+ (unsigned long long int)iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ /* if sendfd is valid, we need to pass it through control msg */
+ if( sendfd >= 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ LOGD("Got ctrl msg pointer");
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+ *((int *)CMSG_DATA(cmsghp)) = sendfd;
+ LOGD("cmsg data=%d", *((int *) CMSG_DATA(cmsghp)));
+ } else {
+ LOGD("ctrl msg NULL");
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0);
+}
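+
+/*
+ * Usage sketch (illustrative only, kept out of the build): sending a packet
+ * with an attached buffer fd. The payload layout is opaque here, so a plain
+ * byte buffer stands in for the real packet type. Pass a negative fd when no
+ * descriptor needs to ride along in the SCM_RIGHTS control message.
+ * example_send_packet is a hypothetical name.
+ */
+#if 0
+static int example_send_packet(int sock_fd, int buf_fd)
+{
+    char packet[64];               /* placeholder for the real packet type */
+    memset(packet, 0, sizeof(packet));
+    return mm_camera_socket_sendmsg(sock_fd, packet, sizeof(packet), buf_fd);
+}
+#endif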
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_bundle_sendmsg
+ *
+ * DESCRIPTION: send msg through domain socket
+ * @fd : socket fd
+ * @msg : pointer to msg to be sent over domain socket
+ * @sendfds : file descriptors to be sent
+ * @numfds : num of file descriptors to be sent
+ *
+ * RETURN : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_bundle_sendmsg(
+ int fd,
+ void *msg,
+ size_t buf_size,
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+ int numfds)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int) * numfds)];
+ int *fds_ptr = NULL;
+
+ if (msg == NULL) {
+ LOGD("msg is NULL");
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ LOGD("iov_len=%llu",
+ (unsigned long long int)iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ /* if numfds is valid, we need to pass it through control msg */
+ if (numfds > 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int) * numfds);
+
+ fds_ptr = (int*) CMSG_DATA(cmsghp);
+ memcpy(fds_ptr, sendfds, sizeof(int) * numfds);
+ } else {
+ LOGE("ctrl msg NULL");
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_socket_recvmsg
+ *
+ * DESCRIPTION: receive msg from domain socket.
+ * @fd : socket fd
+ * @msg : pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ * need be allocated by the caller
+ * @buf_size: the size of the buf that holds incoming msg
+ * @rcvdfd : pointer to hold recvd file descriptor if not NULL.
+ *
+ * RETURN : the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr *cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+ int rcvd_fd = -1;
+ int rcvd_len = 0;
+
+  if ((msg == NULL) || (buf_size == 0)) {
+    LOGE("invalid msg buf or buf_size");
+ return -1;
+ }
+
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+
+ if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+ LOGE("recvmsg failed");
+ return rcvd_len;
+ }
+
+ LOGD("msg_ctrl %p len %zd", msgh.msg_control,
+ msgh.msg_controllen);
+
+ if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+ (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+ if (cmsghp->cmsg_level == SOL_SOCKET &&
+ cmsghp->cmsg_type == SCM_RIGHTS) {
+ LOGD("CtrlMsg is valid");
+ rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+      LOGD("Received fd=%d", rcvd_fd);
+ } else {
+      LOGE("Unexpected Control Msg. Line=%d", __LINE__);
+ }
+ }
+
+ if (rcvdfd) {
+ *rcvdfd = rcvd_fd;
+ }
+
+ return rcvd_len;
+}
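+
+/*
+ * Usage sketch (illustrative only, kept out of the build): receiving a
+ * packet and picking up a descriptor passed via SCM_RIGHTS. Any received fd
+ * becomes the caller's responsibility to close. example_recv_packet is a
+ * hypothetical name.
+ */
+#if 0
+static int example_recv_packet(int sock_fd)
+{
+    char packet[64];               /* placeholder receive buffer */
+    int passed_fd = -1;
+    int len = mm_camera_socket_recvmsg(sock_fd, packet, sizeof(packet),
+            &passed_fd);
+    if ((len > 0) && (passed_fd >= 0)) {
+        /* a descriptor was attached; use it, then close it when done */
+    }
+    return len;
+}
+#endif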
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..c187fb3
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,4581 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdlib.h>
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <media/msm_media_info.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* internal function declarations */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_sync_info(mm_stream_t *my_obj);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *value);
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *value);
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+ void *in_value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info,
+ uint8_t num_planes);
+int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info);
+int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *buf);
+
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame);
+int32_t mm_stream_get_queued_buf_count(mm_stream_t * my_obj);
+
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj);
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *plns);
+uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2);
+
+
+/* state machine function declarations */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
+
+
+/*===========================================================================
+ * FUNCTION : mm_stream_notify_channel
+ *
+ * DESCRIPTION: function to notify channel object on received buffer
+ *
+ * PARAMETERS :
+ * @ch_obj : channel object
+ * @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ *              < 0 -- failure
+ *==========================================================================*/
+int32_t mm_stream_notify_channel(struct mm_channel* ch_obj,
+ mm_camera_buf_info_t *buf_info)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ if ((NULL == ch_obj) || (NULL == buf_info)) {
+ LOGD("Invalid channel/buffer");
+ return -ENODEV;
+ }
+
+ /* send cam_sem_post to wake up channel cmd thread to enqueue
+ * to super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ node->u.buf = *buf_info;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(ch_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(ch_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ rc = -ENOMEM;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_handle_rcvd_buf
+ *
+ * DESCRIPTION: function to handle newly received stream buffer
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *   @buf_info: ptr to struct storing buffer information
+ *   @has_cb  : flag indicating whether an async callback is registered
+ *
+ * RETURN : none
+ *==========================================================================*/
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+ mm_camera_buf_info_t *buf_info,
+ uint8_t has_cb)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* enqueue to super buf thread */
+ if (my_obj->is_bundled) {
+ rc = mm_stream_notify_channel(my_obj->ch_obj, buf_info);
+ if (rc < 0) {
+ LOGE("Unable to notify channel");
+ }
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if(my_obj->is_linked) {
+ /* need to add into super buf for linking, add ref count */
+ my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+
+ rc = mm_stream_notify_channel(my_obj->linked_obj, buf_info);
+ if (rc < 0) {
+ LOGE("Unable to notify channel");
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if(has_cb && my_obj->cmd_thread.is_active) {
+ mm_camera_cmdcb_t* node = NULL;
+
+ /* send cam_sem_post to wake up cmd thread to dispatch dataCB */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ node->u.buf = *buf_info;
+
+ /* enqueue to cmd thread */
+ cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ LOGE("No memory for mm_camera_node_t");
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_dispatch_sync_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users on poll thread
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *   @buf_cb  : ptr to the registered SYNC callback entry
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_dispatch_sync_data(mm_stream_t * my_obj,
+ mm_stream_data_cb_t *buf_cb, mm_camera_buf_info_t *buf_info)
+{
+ mm_camera_super_buf_t super_buf;
+
+ if (NULL == my_obj || buf_info == NULL ||
+ buf_cb == NULL) {
+ return;
+ }
+
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = 1;
+ super_buf.bufs[0] = buf_info->buf;
+ super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = my_obj->ch_obj->my_hdl;
+ if ((buf_cb != NULL) && (buf_cb->cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC)
+ && (buf_cb->cb_count != 0)) {
+ /* callback */
+ buf_cb->cb(&super_buf, buf_cb->user_data);
+
+ /* if >0, reduce count by 1 every time we called CB until reaches 0
+ * when count reach 0, reset the buf_cb to have no CB */
+ if (buf_cb->cb_count > 0) {
+ buf_cb->cb_count--;
+ if (0 == buf_cb->cb_count) {
+ buf_cb->cb = NULL;
+ buf_cb->user_data = NULL;
+ }
+ }
+ }
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_data_notify
+ *
+ * DESCRIPTION: callback to handle data notify from kernel
+ *
+ * PARAMETERS :
+ * @user_data : user data ptr (stream object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_data_notify(void* user_data)
+{
+ mm_stream_t *my_obj = (mm_stream_t*)user_data;
+ int32_t i, rc;
+ uint8_t has_cb = 0, length = 0;
+ mm_camera_buf_info_t buf_info;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ if (MM_STREAM_STATE_ACTIVE != my_obj->state) {
+        /* this CB should only be received in the active (stream-on) state;
+         * if not, return here */
+ LOGE("ERROR!! Wrong state (%d) to receive data notify!",
+ my_obj->state);
+ return;
+ }
+
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ length = 1;
+ } else {
+ length = my_obj->frame_offset.num_planes;
+ }
+
+ memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+ rc = mm_stream_read_msm_frame(my_obj, &buf_info,
+ (uint8_t)length);
+ if (rc != 0) {
+ return;
+ }
+ uint32_t idx = buf_info.buf->buf_idx;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb) {
+ if (my_obj->buf_cb[i].cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+ /*For every SYNC callback, send data*/
+ mm_stream_dispatch_sync_data(my_obj,
+ &my_obj->buf_cb[i], &buf_info);
+ } else {
+ /* for every ASYNC CB, need ref count */
+ has_cb = 1;
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ /* update buffer location */
+ my_obj->buf_status[idx].in_kernel = 0;
+
+ /* update buf ref count */
+ if (my_obj->is_bundled) {
+ /* need to add into super buf since bundled, add ref count */
+ my_obj->buf_status[idx].buf_refcnt++;
+ }
+ my_obj->buf_status[idx].buf_refcnt =
+ (uint8_t)(my_obj->buf_status[idx].buf_refcnt + has_cb);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_dispatch_app_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users
+ *
+ * PARAMETERS :
+ * @cmd_cb : ptr storing stream buffer information
+ * @userdata: user data ptr (stream object)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ int i;
+ mm_stream_t * my_obj = (mm_stream_t *)user_data;
+ mm_camera_buf_info_t* buf_info = NULL;
+ mm_camera_super_buf_t super_buf;
+
+ if (NULL == my_obj) {
+ return;
+ }
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+ LOGE("Wrong cmd_type (%d) for dataCB",
+ cmd_cb->cmd_type);
+ return;
+ }
+
+ buf_info = &cmd_cb->u.buf;
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = 1;
+ super_buf.bufs[0] = buf_info->buf;
+ super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb
+ && (my_obj->buf_cb[i].cb_type !=
+ MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
+ if (my_obj->buf_cb[i].cb_count != 0) {
+ /* if <0, means infinite CB
+ * if >0, means CB for certain times
+ * both case we need to call CB */
+
+ /* increase buf ref cnt */
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ /* callback */
+ my_obj->buf_cb[i].cb(&super_buf,
+ my_obj->buf_cb[i].user_data);
+ }
+
+ /* if >0, reduce count by 1 every time we called CB until reaches 0
+ * when count reach 0, reset the buf_cb to have no CB */
+ if (my_obj->buf_cb[i].cb_count > 0) {
+ my_obj->buf_cb[i].cb_count--;
+ if (0 == my_obj->buf_cb[i].cb_count) {
+ my_obj->buf_cb[i].cb = NULL;
+ my_obj->buf_cb[i].user_data = NULL;
+ }
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ /* do buf_done since we increased refcnt by one when has_cb */
+ mm_stream_buf_done(my_obj, buf_info->buf);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_fn
+ *
+ * DESCRIPTION: stream finite state machine entry function. Depending on the
+ *              stream state, the incoming event is handled differently.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch (my_obj->state) {
+ case MM_STREAM_STATE_NOTUSED:
+ LOGD("Not handling evt in unused state");
+ break;
+ case MM_STREAM_STATE_INITED:
+ rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACQUIRED:
+ rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_CFG:
+ rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_BUFFED:
+ rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_REG:
+ rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACTIVE:
+ rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ LOGD("Not a valid state (%d)", my_obj->state);
+ break;
+ }
+ LOGD("X rc =%d",rc);
+ return rc;
+}
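+
+/*
+ * Usage sketch (illustrative only, kept out of the build): how a caller
+ * drives a stream through its first two states via the FSM entry point. The
+ * config object would be filled in by the channel layer and is left opaque
+ * here; example_acquire_and_config is a hypothetical name.
+ */
+#if 0
+static int32_t example_acquire_and_config(mm_stream_t *stream,
+        mm_camera_stream_config_t *config)
+{
+    int32_t rc = mm_stream_fsm_fn(stream, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+    if (0 == rc) {
+        /* INITED -> ACQUIRED succeeded; now push the format/config event */
+        rc = mm_stream_fsm_fn(stream, MM_STREAM_EVT_SET_FMT, config, NULL);
+    }
+    return rc;
+}
+#endif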
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_inited
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in INITED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ char dev_name[MM_CAMERA_DEV_NAME_LEN];
+ const char *dev_name_value = NULL;
+ if (NULL == my_obj) {
+        LOGE("NULL stream object\n");
+ return -1;
+ }
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_ACQUIRE:
+ if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+ LOGE("NULL channel or camera obj\n");
+ rc = -1;
+ break;
+ }
+
+ dev_name_value = mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl);
+ if (NULL == dev_name_value) {
+ LOGE("NULL device name\n");
+ rc = -1;
+ break;
+ }
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+ dev_name_value);
+
+ my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (my_obj->fd < 0) {
+ LOGE("open dev returned %d\n", my_obj->fd);
+ rc = -1;
+ break;
+ }
+ LOGD("open dev fd = %d\n", my_obj->fd);
+ rc = mm_stream_set_ext_mode(my_obj);
+ if (0 == rc) {
+ my_obj->state = MM_STREAM_STATE_ACQUIRED;
+ } else {
+ /* failed setting ext_mode
+ * close fd */
+ close(my_obj->fd);
+ my_obj->fd = -1;
+ break;
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_acquired
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACQUIRED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+        /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ /* change state to not used */
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_cfg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in CONFIGURED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+        /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_BUF:
+ rc = mm_stream_init_bufs(my_obj);
+ /* change state to buff allocated */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_buffed
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in BUFFED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_PUT_BUF:
+ rc = mm_stream_deinit_bufs(my_obj);
+        /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+ break;
+ case MM_STREAM_EVT_REG_BUF:
+ rc = mm_stream_reg_buf(my_obj);
+        /* change state to registered */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_reg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in REGGED
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ switch(evt) {
+ case MM_STREAM_EVT_UNREG_BUF:
+ rc = mm_stream_unreg_buf(my_obj);
+
+ /* change state to buffed */
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ break;
+ case MM_STREAM_EVT_START:
+ {
+ uint8_t has_cb = 0;
+ uint8_t i;
+ /* launch cmd thread if CB is not null */
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if((NULL != my_obj->buf_cb[i].cb) &&
+ (my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
+ has_cb = 1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_stream_dispatch_app_data,
+ (void *)my_obj);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+
+ my_obj->state = MM_STREAM_STATE_ACTIVE;
+ rc = mm_stream_streamon(my_obj);
+ if (0 != rc) {
+ /* failed stream on, need to release cmd thread if it's launched */
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+ my_obj->state = MM_STREAM_STATE_REG;
+ break;
+ }
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_fsm_active
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACTIVE
+ * state.
+ *
+ * PARAMETERS :
+ * @my_obj : ptr to a stream object
+ * @evt : stream event to be processed
+ * @in_val : input event payload. Can be NULL if not needed.
+ * @out_val : output payload, Can be NULL if not needed.
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ switch(evt) {
+ case MM_STREAM_EVT_QBUF:
+ rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+ break;
+ case MM_STREAM_EVT_GET_QUEUED_BUF_COUNT:
+ rc = mm_stream_get_queued_buf_count(my_obj);
+ break;
+ case MM_STREAM_EVT_STOP:
+ {
+ uint8_t has_cb = 0;
+ uint8_t i;
+ rc = mm_stream_streamoff(my_obj);
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb
+ && my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+ has_cb = 1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ pthread_mutex_lock(&my_obj->cmd_lock);
+ if (has_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+ }
+ pthread_mutex_unlock(&my_obj->cmd_lock);
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_set_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ {
+ mm_evt_paylod_set_get_stream_parms_t *payload =
+ (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+ rc = mm_stream_get_parm(my_obj, payload->parms);
+ }
+ break;
+ case MM_STREAM_EVT_DO_ACTION:
+ rc = mm_stream_do_action(my_obj, in_val);
+ break;
+ default:
+ LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+ my_obj->state, evt, in_val, out_val);
+ }
+ LOGD("X rc = %d", rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping stream buffer via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and mapping
+ * buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes - 1)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_map_buf_ops(uint32_t frame_idx,
+ int32_t plane_idx,
+ int fd,
+ size_t size,
+ cam_mapping_buf_type type,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_map_buf(my_obj,
+ type,
+ frame_idx, plane_idx, fd, size);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_bundled_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping bundled stream buffers via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and mapping
+ * buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ * @buf_map_list : list of buffer mapping information
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_bundled_map_buf_ops(
+ const cam_buf_map_type_list *buf_map_list,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_map_bufs(my_obj,
+ buf_map_list);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_unmap_buf_ops
+ *
+ * DESCRIPTION: ops for unmapping stream buffer via domain socket to server.
+ * This function will be passed to upper layer as part of ops table
+ * to be used by upper layer when allocating stream buffers and unmapping
+ *              buffers from the server via domain socket.
+ *
+ * PARAMETERS :
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @plane_idx : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes - 1)
+ * @userdata : user data ptr (stream object)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_unmap_buf_ops(uint32_t frame_idx,
+ int32_t plane_idx,
+ cam_mapping_buf_type type,
+ void *userdata)
+{
+ mm_stream_t *my_obj = (mm_stream_t *)userdata;
+ return mm_stream_unmap_buf(my_obj,
+ type,
+ frame_idx,
+ plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_config
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @config : stream configuration
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = 0;
+ int32_t cb_index = 0;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ my_obj->stream_info = config->stream_info;
+ my_obj->buf_num = (uint8_t) config->stream_info->num_bufs;
+ my_obj->mem_vtbl = config->mem_vtbl;
+ my_obj->padding_info = config->padding_info;
+
+ if (config->stream_cb_sync != NULL) {
+ /* SYNC callback is always placed at index 0*/
+ my_obj->buf_cb[cb_index].cb = config->stream_cb_sync;
+ my_obj->buf_cb[cb_index].user_data = config->userdata;
+ my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+ my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_SYNC;
+ cb_index++;
+ }
+ my_obj->buf_cb[cb_index].cb = config->stream_cb;
+ my_obj->buf_cb[cb_index].user_data = config->userdata;
+ my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+ my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_ASYNC;
+
+ rc = mm_stream_sync_info(my_obj);
+ if (rc == 0) {
+ rc = mm_stream_set_fmt(my_obj);
+ if (rc < 0) {
+ LOGE("mm_stream_set_fmt failed %d",
+ rc);
+ }
+ }
+
+ my_obj->map_ops.map_ops = mm_stream_map_buf_ops;
+ my_obj->map_ops.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+ my_obj->map_ops.unmap_ops = mm_stream_unmap_buf_ops;
+ my_obj->map_ops.userdata = my_obj;
+
+ if(my_obj->mem_vtbl.set_config_ops != NULL) {
+ my_obj->mem_vtbl.set_config_ops(&my_obj->map_ops,
+ my_obj->mem_vtbl.user_data);
+ }
+ return rc;
+}
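+
+/*
+ * Usage sketch (illustrative only, kept out of the build): how an allocator
+ * that received the table via mem_vtbl.set_config_ops() might map and later
+ * unmap one stream buffer. 'ops' stands for a pointer to the structure
+ * populated above (its exact type name is not shown in this file), and
+ * frame_idx/buf_fd/buf_size are whatever the allocator tracks for the buffer.
+ */
+#if 0
+    rc = ops->map_ops(frame_idx,
+            -1 /* all planes share a single fd */,
+            buf_fd, buf_size,
+            CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+            ops->userdata);
+    if (0 == rc) {
+        /* the buffer is now known to the daemon; before freeing it: */
+        ops->unmap_ops(frame_idx, -1,
+                CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops->userdata);
+    }
+#endif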
+
+/*===========================================================================
+ * FUNCTION : mm_stream_release
+ *
+ * DESCRIPTION: release a stream resource
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_release(mm_stream_t *my_obj)
+{
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ memset(my_obj->buf_status, 0, sizeof(my_obj->buf_status));
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ /* close fd */
+ if(my_obj->fd >= 0)
+ {
+ close(my_obj->fd);
+ }
+
+ /* destroy mutex */
+ pthread_cond_destroy(&my_obj->buf_cond);
+ pthread_mutex_destroy(&my_obj->buf_lock);
+ pthread_mutex_destroy(&my_obj->cb_lock);
+ pthread_mutex_destroy(&my_obj->cmd_lock);
+
+ /* reset stream obj */
+ memset(my_obj, 0, sizeof(mm_stream_t));
+ my_obj->fd = -1;
+
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_streamon
+ *
+ * DESCRIPTION: start streaming on a stream by sending a v4l2 STREAMON
+ *              request to the kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ int8_t i;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].map_status == 0) &&
+ (my_obj->buf_status[i].in_kernel)) {
+            LOGD("waiting for mapping to be done: strm fd = %d",
+ my_obj->fd);
+ struct timespec ts;
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += WAIT_TIMEOUT;
+ rc = pthread_cond_timedwait(&my_obj->buf_cond, &my_obj->buf_lock, &ts);
+ if (rc == ETIMEDOUT) {
+ LOGE("Timed out. Abort stream-on \n");
+ rc = -1;
+ }
+ break;
+ } else if (my_obj->buf_status[i].map_status < 0) {
+ LOGD("Buffer mapping failed. Abort Stream On");
+ rc = -1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ if (rc < 0) {
+ /* remove fd from data poll thread in case of failure */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_sync_call);
+ return rc;
+ }
+
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+ if (rc < 0) {
+ LOGE("ioctl VIDIOC_STREAMON failed: rc=%d, errno %d",
+ rc, errno);
+ /* remove fd from data poll thread in case of failure */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl, mm_camera_sync_call);
+ }
+ LOGD("X rc = %d",rc);
+ return rc;
+}
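+
+/*
+ * Note on the bounded wait above (illustrative sketch, kept out of the
+ * build): pthread_cond_timedwait() expects an absolute CLOCK_REALTIME
+ * deadline, so the relative WAIT_TIMEOUT (seconds, defined elsewhere in the
+ * HAL headers) is added to "now" before waiting, and ETIMEDOUT is returned
+ * directly rather than through errno. cond/lock below are placeholders for
+ * the stream's buf_cond/buf_lock pair.
+ */
+#if 0
+    struct timespec deadline;
+    clock_gettime(CLOCK_REALTIME, &deadline);
+    deadline.tv_sec += WAIT_TIMEOUT;
+    int wait_rc = pthread_cond_timedwait(&cond, &lock, &deadline);
+    if (ETIMEDOUT == wait_rc) {
+        /* mapping never completed within the timeout window */
+    }
+#endif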
+
+/*===========================================================================
+ * FUNCTION : mm_stream_streamoff
+ *
+ * DESCRIPTION: stop streaming on a stream by sending a v4l2 STREAMOFF
+ *              request to the kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* step1: remove fd from data poll thread */
+ rc = mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_sync_call);
+ if (rc < 0) {
+ /* The error might be due to async update. In this case
+ * wait for all updates to complete before proceeding. */
+ rc = mm_camera_poll_thread_commit_updates(&my_obj->ch_obj->poll_thread[0]);
+ if (rc < 0) {
+ LOGE("Poll sync failed %d",
+ rc);
+ }
+ }
+
+ /* step2: stream off */
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+ if (rc < 0) {
+ LOGE("STREAMOFF failed: %s\n",
+ strerror(errno));
+ }
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_write_user_buf
+ *
+ * DESCRIPTION: queue a user (batch) buffer back to the kernel; individual
+ *              buffers are staged into the batch container and the container
+ *              is queued once it is full
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = 0, i;
+ int32_t index = -1, count = 0;
+ struct msm_camera_user_buf_cont_t *cont_buf = NULL;
+
+ if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[buf->buf_idx].buf_refcnt--;
+ if (0 == my_obj->buf_status[buf->buf_idx].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[buf->buf_idx].buffer;
+ cont_buf->buf_cnt = my_obj->buf[buf->buf_idx].user_buf.bufs_used;
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ cont_buf->buf_idx[i] = my_obj->buf[buf->buf_idx].user_buf.buf_idx[i];
+ }
+ rc = mm_stream_qbuf(my_obj, buf);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ buf->buf_idx, rc);
+ } else {
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ my_obj->buf[buf->buf_idx].user_buf.buf_idx[i] = -1;
+ }
+ my_obj->buf_status[buf->buf_idx].in_kernel = 1;
+ my_obj->buf[buf->buf_idx].user_buf.buf_in_use = 1;
+ }
+ } else {
+ LOGD("<DEBUG> : ref count pending count :%d idx = %d",
+ my_obj->buf_status[buf->buf_idx].buf_refcnt, buf->buf_idx);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return rc;
+ }
+
+ if ((my_obj->cur_buf_idx < 0)
+ || (my_obj->cur_buf_idx >= my_obj->buf_num)) {
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].in_kernel)
+ || (my_obj->buf[i].user_buf.buf_in_use)) {
+ continue;
+ }
+
+ my_obj->cur_buf_idx = index = i;
+ break;
+ }
+ } else {
+ index = my_obj->cur_buf_idx;
+ }
+
+ if (index == -1) {
+ LOGE("No Free batch buffer");
+ rc = -1;
+ return rc;
+ }
+
+ //Insert Buffer to Batch structure.
+ my_obj->buf[index].user_buf.buf_idx[count] = buf->buf_idx;
+ my_obj->cur_bufs_staged++;
+
+ LOGD("index = %d filled = %d used = %d",
+ index,
+ my_obj->cur_bufs_staged,
+ my_obj->buf[index].user_buf.bufs_used);
+
+ if (my_obj->cur_bufs_staged
+ == my_obj->buf[index].user_buf.bufs_used){
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[index].buf_refcnt--;
+ if (0 == my_obj->buf_status[index].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[index].buffer;
+ cont_buf->buf_cnt = my_obj->buf[index].user_buf.bufs_used;
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ cont_buf->buf_idx[i] = my_obj->buf[index].user_buf.buf_idx[i];
+ }
+ rc = mm_stream_qbuf(my_obj, &my_obj->buf[index]);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ index, rc);
+ } else {
+ for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
+ my_obj->buf[index].user_buf.buf_idx[i] = -1;
+ }
+ my_obj->buf_status[index].in_kernel = 1;
+ my_obj->buf[index].user_buf.buf_in_use = 1;
+ my_obj->cur_bufs_staged = 0;
+ my_obj->cur_buf_idx = -1;
+ }
+ }else{
+ LOGD("<DEBUG> : ref count pending count :%d idx = %d",
+ my_obj->buf_status[index].buf_refcnt, index);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_read_user_buf
+ *
+ * DESCRIPTION: dequeue a stream buffer from user buffer queue and fill internal structure
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_info : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info)
+{
+ int32_t rc = 0, i;
+ mm_camera_buf_def_t *stream_buf = NULL;
+ struct msm_camera_user_buf_cont_t *user_buf = NULL;
+ nsecs_t interval_nsec = 0, frame_ts = 0, timeStamp = 0;
+ int ts_delta = 0;
+ uint32_t frameID = 0;
+
+ user_buf = (struct msm_camera_user_buf_cont_t *)buf_info->buf->buffer;
+
+ if(user_buf != my_obj->buf[buf_info->buf->buf_idx].buffer) {
+ LOGD("Buffer modified. ERROR");
+ rc = -1;
+ return rc;
+ }
+
+ if (buf_info->buf->frame_idx == 1) {
+ frameID = buf_info->buf->frame_idx;
+ }else {
+ frameID = (buf_info->buf->frame_idx - 1) * user_buf->buf_cnt;
+ }
+
+ timeStamp = (nsecs_t)(buf_info->buf->ts.tv_sec) *
+ 1000000000LL + buf_info->buf->ts.tv_nsec;
+
+ if (timeStamp <= my_obj->prev_timestamp) {
+ LOGE("TimeStamp received less than expected");
+ mm_stream_qbuf(my_obj, buf_info->buf);
+ return rc;
+ } else if (my_obj->prev_timestamp == 0
+ || (my_obj->prev_frameID != buf_info->buf->frame_idx + 1)) {
+        /* For the first frame, or in case a batch was dropped */
+ interval_nsec = ((my_obj->stream_info->user_buf_info.frameInterval) * 1000000);
+ my_obj->prev_timestamp = (timeStamp - (nsecs_t)(user_buf->buf_cnt * interval_nsec));
+ } else {
+ ts_delta = timeStamp - my_obj->prev_timestamp;
+ interval_nsec = (nsecs_t)(ts_delta / user_buf->buf_cnt);
+ LOGD("Timestamp delta = %d timestamp = %lld", ts_delta, timeStamp);
+ }
+
+ for (i = 0; i < (int32_t)user_buf->buf_cnt; i++) {
+ buf_info->buf->user_buf.buf_idx[i] = user_buf->buf_idx[i];
+ stream_buf = &my_obj->plane_buf[user_buf->buf_idx[i]];
+ stream_buf->frame_idx = frameID + i;
+
+ frame_ts = (i * interval_nsec) + my_obj->prev_timestamp;
+
+ stream_buf->ts.tv_sec = (frame_ts / 1000000000LL);
+ stream_buf->ts.tv_nsec = (frame_ts - (stream_buf->ts.tv_sec * 1000000000LL));
+ stream_buf->is_uv_subsampled = buf_info->buf->is_uv_subsampled;
+
+ LOGD("buf_index %d, frame_idx %d, stream type %d, timestamp = %lld",
+ stream_buf->buf_idx, stream_buf->frame_idx,
+ my_obj->stream_info->stream_type, frame_ts);
+ }
+
+ buf_info->buf->ts.tv_sec = (my_obj->prev_timestamp / 1000000000LL);
+ buf_info->buf->ts.tv_nsec = (my_obj->prev_timestamp -
+ (buf_info->buf->ts.tv_sec * 1000000000LL));
+
+ buf_info->buf->user_buf.bufs_used = user_buf->buf_cnt;
+ buf_info->buf->user_buf.buf_in_use = 1;
+
+ my_obj->prev_timestamp = timeStamp;
+ my_obj->prev_frameID = buf_info->buf->frame_idx;
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
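+
+/* Illustrative worked example (hypothetical numbers, not taken from this HAL):
+ * the interpolation above spreads one DQBUF timestamp across the frames of a
+ * batch. For a batch of 4 frames, a previous batch timestamp of 100 ms and a
+ * current DQBUF timestamp of 232 ms (shown in ms for readability),
+ *
+ *     ts_delta      = 232 - 100 = 132 ms
+ *     interval      = 132 / 4   =  33 ms
+ *     frame_ts[i]   = 100 + i * 33  ->  100, 133, 166, 199 ms
+ *
+ * so the individual frames are stamped evenly between the two batch
+ * timestamps instead of all sharing the batch DQBUF time.
+ */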
+
+/*===========================================================================
+ * FUNCTION : mm_stream_read_msm_frame
+ *
+ * DESCRIPTION: dequeue a stream buffer from kernel queue
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_info : ptr to a struct storing buffer information
+ * @num_planes : number of planes in the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info,
+ uint8_t num_planes)
+{
+ int32_t rc = 0;
+ struct v4l2_buffer vb;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ memset(&vb, 0, sizeof(vb));
+ vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ vb.memory = V4L2_MEMORY_USERPTR;
+ vb.m.planes = &planes[0];
+ vb.length = num_planes;
+
+ rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+ if (0 > rc) {
+ LOGE("VIDIOC_DQBUF ioctl call failed on stream type %d (rc=%d): %s",
+ my_obj->stream_info->stream_type, rc, strerror(errno));
+ } else {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->queued_buffer_count--;
+ if (0 == my_obj->queued_buffer_count) {
+            LOGH("Stopping poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_async_call);
+ LOGH("Stopped poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ uint32_t idx = vb.index;
+ buf_info->buf = &my_obj->buf[idx];
+ buf_info->frame_idx = vb.sequence;
+ buf_info->stream_id = my_obj->my_hdl;
+
+ buf_info->buf->stream_id = my_obj->my_hdl;
+ buf_info->buf->buf_idx = idx;
+ buf_info->buf->frame_idx = vb.sequence;
+ buf_info->buf->ts.tv_sec = vb.timestamp.tv_sec;
+ buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+ buf_info->buf->flags = vb.flags;
+
+ LOGH("VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d, rc %d,"
+ "queued: %d, buf_type = %d flags = %d",
+ vb.index, buf_info->buf->frame_idx,
+ my_obj->stream_info->stream_type, rc,
+ my_obj->queued_buffer_count, buf_info->buf->buf_type,
+ buf_info->buf->flags);
+
+ buf_info->buf->is_uv_subsampled =
+ (vb.reserved == V4L2_PIX_FMT_NV14 || vb.reserved == V4L2_PIX_FMT_NV41);
+
+ if(buf_info->buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ mm_stream_read_user_buf(my_obj, buf_info);
+ }
+
+ if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
+ rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
+ my_obj->mem_vtbl.user_data);
+ if (0 > rc) {
+ LOGE("Clean invalidate cache failed on buffer index: %d",
+ idx);
+ }
+ } else {
+ LOGE("Clean invalidate cache op not supported");
+ }
+ }
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @in_value : ptr to a param struct to be set to server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the parms struct buf is already mapped to server via
+ * domain socket. Corresponding fields of parameters to be set
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ if (rc < 0) {
+ LOGE("Failed to set stream parameter type = %d", in_value->type);
+ }
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *   @in_value : ptr to a param struct to be retrieved from the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of the parameters to be
+ *              retrieved are already filled in by the upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+ cam_stream_parm_buffer_t *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_g_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_do_action
+ *
+ * DESCRIPTION: request server to perform stream-based actions
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @in_value : ptr to a struct of actions to be performed by the server
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : Assume the action struct buf is already mapped to server via
+ * domain socket. Corresponding fields of actions to be performed
+ * are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+ void *in_value)
+{
+ int32_t rc = -1;
+ int32_t value = 0;
+ if (in_value != NULL) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_set_ext_mode
+ *
+ * DESCRIPTION: set stream extended mode to server via v4l2 ioctl
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE       : Server will return a server stream id that uniquely identifies
+ *              this stream on the server side. Subsequent per-stream
+ *              communication with the server should use this server stream id.
+ *==========================================================================*/
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_streamparm s_parm;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ memset(&s_parm, 0, sizeof(s_parm));
+ s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+ LOGD("stream fd=%d, rc=%d, extended_mode=%d\n",
+ my_obj->fd, rc, s_parm.parm.capture.extendedmode);
+ if (rc == 0) {
+ /* get server stream id */
+ my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
+ } else {
+ LOGE("VIDIOC_S_PARM failed %d, errno %d", rc, errno);
+ }
+ return rc;
+}
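+
+/* Illustrative note (hypothetical sketch, not part of this HAL build): the
+ * value returned in parm.capture.extendedmode is the server-side stream id,
+ * and it is what later identifies this stream in every domain-socket packet,
+ * e.g.
+ *
+ *     packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+ *
+ * as done in mm_stream_map_buf() and mm_stream_unmap_buf() below.
+ */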
+
+/*===========================================================================
+ * FUNCTION : mm_stream_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel queue for future use
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf : ptr to a struct storing buffer information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+ int32_t rc = 0;
+ uint32_t length = 0;
+ struct v4l2_buffer buffer;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d, stream type = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state,
+ my_obj->stream_info->stream_type);
+
+ if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ LOGD("USERPTR num_buf = %d, idx = %d",
+ buf->user_buf.bufs_used, buf->buf_idx);
+ memset(&planes, 0, sizeof(planes));
+ planes[0].length = my_obj->stream_info->user_buf_info.size;
+ planes[0].m.userptr = buf->fd;
+ length = 1;
+ } else {
+ memcpy(planes, buf->planes_buf.planes, sizeof(planes));
+ length = buf->planes_buf.num_planes;
+ }
+
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ buffer.memory = V4L2_MEMORY_USERPTR;
+ buffer.index = (__u32)buf->buf_idx;
+ buffer.m.planes = &planes[0];
+ buffer.length = (__u32)length;
+
+ if ( NULL != my_obj->mem_vtbl.invalidate_buf ) {
+ rc = my_obj->mem_vtbl.invalidate_buf(buffer.index,
+ my_obj->mem_vtbl.user_data);
+ if ( 0 > rc ) {
+ LOGE("Cache invalidate failed on buffer index: %d",
+ buffer.index);
+ return rc;
+ }
+ } else {
+ LOGE("Cache invalidate op not added");
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->queued_buffer_count++;
+ if (1 == my_obj->queued_buffer_count) {
+ /* Add fd to data poll thread */
+ LOGH("Starting poll on stream %p type: %d",
+ my_obj,my_obj->stream_info->stream_type);
+ rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, my_obj->fd, mm_stream_data_notify, (void*)my_obj,
+ mm_camera_async_call);
+ if (0 > rc) {
+ LOGE("Add poll on stream %p type: %d fd error (rc=%d)",
+ my_obj, my_obj->stream_info->stream_type, rc);
+ } else {
+ LOGH("Started poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (0 > rc) {
+ LOGE("VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
+ my_obj->stream_info->stream_type, rc, strerror(errno));
+ my_obj->queued_buffer_count--;
+ if (0 == my_obj->queued_buffer_count) {
+ /* Remove fd from data poll in case of failing
+ * first buffer queuing attempt */
+            LOGH("Stopping poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl, mm_camera_async_call);
+ LOGH("Stopped poll on stream %p type: %d",
+ my_obj, my_obj->stream_info->stream_type);
+ }
+ } else {
+ LOGH("VIDIOC_QBUF buf_index %d, frame_idx %d stream type %d, rc %d,"
+ " queued: %d, buf_type = %d",
+ buffer.index, buf->frame_idx, my_obj->stream_info->stream_type, rc,
+ my_obj->queued_buffer_count, buf->buf_type);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ return rc;
+}
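+
+/* Illustrative note (hypothetical sketch, not part of this HAL build): the
+ * queued_buffer_count bookkeeping drives registration with the data poll
+ * thread:
+ *
+ *     0 -> 1 on enqueue : add my_obj->fd to poll_thread[0] (start watching)
+ *     back to 0         : remove my_obj->fd again, either on dequeue
+ *                         (mm_stream_read_msm_frame above) or on a failed
+ *                         first enqueue
+ *
+ * so a VIDIOC_QBUF failure right after the 0 -> 1 transition leaves no stale
+ * fd registered with the poll thread.
+ */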
+
+/*===========================================================================
+ * FUNCTION : mm_stream_request_buf
+ *
+ * DESCRIPTION: This function lets the kernel know the number of buffers that
+ *              need to be registered, via v4l2 ioctl.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_requestbuffers bufreq;
+ uint8_t buf_num = my_obj->buf_num;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ LOGD("buf_num = %d, stream type = %d",
+ buf_num, my_obj->stream_info->stream_type);
+
+ if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+ LOGE("buf num %d > max limit %d\n",
+ buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+ return -1;
+ }
+
+ memset(&bufreq, 0, sizeof(bufreq));
+ bufreq.count = buf_num;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ LOGE("fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d, errno %d",
+ my_obj->fd, rc, errno);
+ }
+
+ LOGD("X rc = %d",rc);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_need_wait_for_mapping
+ *
+ * DESCRIPTION: Utility function to determine whether to wait for mapping
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int8_t whether wait is necessary
+ * 0 -- no wait
+ * 1 -- wait
+ *==========================================================================*/
+int8_t mm_stream_need_wait_for_mapping(mm_stream_t * my_obj)
+{
+ uint32_t i;
+ int8_t ret = 0;
+
+ for (i = 0; i < my_obj->buf_num; i++) {
+ if ((my_obj->buf_status[i].map_status == 0)
+ && (my_obj->buf_status[i].in_kernel)) {
+            /* do not signal if any buffer is not yet mapped
+             * but is already queued to the kernel. */
+ ret = 1;
+ } else if (my_obj->buf_status[i].map_status < 0) {
+ return 0;
+ }
+ }
+
+ return ret;
+}
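+
+/* Illustrative sketch (hypothetical caller, not part of this HAL build): a
+ * waiter would typically block on buf_cond until every kernel-queued buffer
+ * is mapped, relying on the pthread_cond_signal() calls issued from
+ * mm_stream_map_buf() / mm_stream_map_bufs() below:
+ *
+ *     pthread_mutex_lock(&my_obj->buf_lock);
+ *     while (mm_stream_need_wait_for_mapping(my_obj) == 1) {
+ *         pthread_cond_wait(&my_obj->buf_cond, &my_obj->buf_lock);
+ *     }
+ *     pthread_mutex_unlock(&my_obj->buf_lock);
+ */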
+
+/*===========================================================================
+ * FUNCTION : mm_stream_map_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_type : type of buffer to be mapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ * @fd : file descriptor of the buffer
+ * @size : size of the buffer
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_map_buf(mm_stream_t * my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx,
+ int32_t fd,
+ size_t size)
+{
+ int32_t rc = 0;
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+ packet.payload.buf_map.type = buf_type;
+ packet.payload.buf_map.fd = fd;
+ packet.payload.buf_map.size = size;
+ packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+ packet.payload.buf_map.frame_idx = frame_idx;
+ packet.payload.buf_map.plane_idx = plane_idx;
+ LOGD("mapping buf_type %d, stream_id %d, frame_idx %d, fd %d, size %d",
+ buf_type, my_obj->server_stream_id, frame_idx, fd, size);
+ rc = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet, sizeof(cam_sock_packet_t), fd);
+
+ if ((buf_type == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+ || ((buf_type
+ == CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+ && (my_obj->stream_info != NULL)
+ && (my_obj->stream_info->streaming_mode
+ == CAM_STREAMING_MODE_BATCH))) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (rc < 0) {
+ my_obj->buf_status[frame_idx].map_status = -1;
+ } else {
+ my_obj->buf_status[frame_idx].map_status = 1;
+ }
+ if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+ LOGD("Buffer mapping Done: Signal strm fd = %d",
+ my_obj->fd);
+ pthread_cond_signal(&my_obj->buf_cond);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return rc;
+}
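+
+/* Illustrative usage (hypothetical values frame_idx, buf_fd and buf_size, not
+ * part of this HAL build): mapping a regular stream buffer whose planes all
+ * share a single ion fd, before it is queued to the kernel:
+ *
+ *     rc = mm_stream_map_buf(my_obj, CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+ *                            frame_idx, -1,    // -1: planes share one fd
+ *                            buf_fd, buf_size);
+ */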
+
+/*===========================================================================
+ * FUNCTION : mm_stream_map_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_map_list : list of buffer objects to map
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+
+int32_t mm_stream_map_bufs(mm_stream_t * my_obj,
+ const cam_buf_map_type_list *buf_map_list)
+{
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+ memcpy(&packet.payload.buf_map_list, buf_map_list,
+ sizeof(packet.payload.buf_map_list));
+
+ int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+ uint32_t numbufs = packet.payload.buf_map_list.length;
+ if (numbufs < 1) {
+ LOGD("No buffers, suppressing the mapping command");
+ return 0;
+ }
+
+ uint32_t i;
+ for (i = 0; i < numbufs; i++) {
+ packet.payload.buf_map_list.buf_maps[i].stream_id = my_obj->server_stream_id;
+ sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+ }
+
+ for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+ packet.payload.buf_map_list.buf_maps[i].fd = -1;
+ sendfds[i] = -1;
+ }
+
+ int32_t ret = mm_camera_util_bundled_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet, sizeof(cam_sock_packet_t), sendfds, numbufs);
+ if ((numbufs > 0) && ((buf_map_list->buf_maps[0].type
+ == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+ || ((buf_map_list->buf_maps[0].type ==
+ CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+ && (my_obj->stream_info != NULL)
+ && (my_obj->stream_info->streaming_mode
+ == CAM_STREAMING_MODE_BATCH)))) {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for (i = 0; i < numbufs; i++) {
+ if (ret < 0) {
+ my_obj->buf_status[i].map_status = -1;
+ } else {
+ my_obj->buf_status[i].map_status = 1;
+ }
+ }
+
+ if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+ LOGD("Buffer mapping Done: Signal strm fd = %d",
+ my_obj->fd);
+ pthread_cond_signal(&my_obj->buf_cond);
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_unmap_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @buf_type : type of buffer to be unmapped. could be following values:
+ * CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ * CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ * @frame_idx : index of buffer within the stream buffers, only valid if
+ * buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ * CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx : plane index. If all planes share the same fd,
+ *                plane_idx = -1; otherwise, plane_idx is the
+ *                index of the plane (0..num_of_planes)
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unmap_buf(mm_stream_t * my_obj,
+ uint8_t buf_type,
+ uint32_t frame_idx,
+ int32_t plane_idx)
+{
+ if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+ LOGE("NULL obj of stream/channel/camera");
+ return -1;
+ }
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+ packet.payload.buf_unmap.type = buf_type;
+ packet.payload.buf_unmap.stream_id = my_obj->server_stream_id;
+ packet.payload.buf_unmap.frame_idx = frame_idx;
+ packet.payload.buf_unmap.plane_idx = plane_idx;
+ int32_t ret = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+ &packet,
+ sizeof(cam_sock_packet_t),
+ -1);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[frame_idx].map_status = 0;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return ret;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_init_bufs
+ *
+ * DESCRIPTION: initialize the stream buffers. This function requests the
+ *              buffers from the upper layer through the mem ops table passed
+ *              during the configuration stage.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+ int32_t i, rc = 0;
+ uint8_t *reg_flags = NULL;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* deinit buf if it's not NULL*/
+ if (NULL != my_obj->buf) {
+ mm_stream_deinit_bufs(my_obj);
+ }
+
+ rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
+ &my_obj->buf_num,
+ &reg_flags,
+ &my_obj->buf,
+ &my_obj->map_ops,
+ my_obj->mem_vtbl.user_data);
+
+ if (0 != rc) {
+ LOGE("Error get buf, rc = %d\n", rc);
+ return rc;
+ }
+
+ for (i = 0; i < my_obj->buf_num; i++) {
+ my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+ my_obj->buf[i].stream_id = my_obj->my_hdl;
+ my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
+
+ if (my_obj->buf[i].buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+ my_obj->buf[i].user_buf.bufs_used =
+ (int8_t)my_obj->stream_info->user_buf_info.frame_buf_cnt;
+ my_obj->buf[i].user_buf.buf_in_use = reg_flags[i];
+ }
+ }
+
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ my_obj->plane_buf = my_obj->buf[0].user_buf.plane_buf;
+ if (my_obj->plane_buf != NULL) {
+ my_obj->plane_buf_num =
+ my_obj->buf_num *
+ my_obj->stream_info->user_buf_info.frame_buf_cnt;
+ for (i = 0; i < my_obj->plane_buf_num; i++) {
+ my_obj->plane_buf[i].stream_id = my_obj->my_hdl;
+ my_obj->plane_buf[i].stream_type = my_obj->stream_info->stream_type;
+ }
+ }
+ my_obj->cur_bufs_staged = 0;
+ my_obj->cur_buf_idx = -1;
+ }
+
+ free(reg_flags);
+ reg_flags = NULL;
+
+ /* update in stream info about number of stream buffers */
+ my_obj->stream_info->num_bufs = my_obj->buf_num;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_deinit_bufs
+ *
+ * DESCRIPTION: return stream buffers to upper layer through the mem ops table
+ * passed during configuration stage.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+
+ mm_camera_map_unmap_ops_tbl_t ops_tbl;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (NULL == my_obj->buf) {
+ LOGD("Buf is NULL, no need to deinit");
+ return rc;
+ }
+
+ /* release bufs */
+ ops_tbl.map_ops = mm_stream_map_buf_ops;
+ ops_tbl.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+ ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+ ops_tbl.userdata = my_obj;
+
+ rc = my_obj->mem_vtbl.put_bufs(&ops_tbl,
+ my_obj->mem_vtbl.user_data);
+
+ if (my_obj->plane_buf != NULL) {
+ free(my_obj->plane_buf);
+ my_obj->plane_buf = NULL;
+ }
+
+ free(my_obj->buf);
+ my_obj->buf = NULL;
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_reg_buf
+ *
+ * DESCRIPTION: register buffers with kernel by calling v4l2 ioctl QBUF for
+ * each buffer in the stream
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ uint8_t i;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ rc = mm_stream_request_buf(my_obj);
+ if (rc != 0) {
+ return rc;
+ }
+
+ my_obj->queued_buffer_count = 0;
+ for(i = 0; i < my_obj->buf_num; i++){
+ /* check if need to qbuf initially */
+ if (my_obj->buf_status[i].initial_reg_flag) {
+ rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+ if (rc != 0) {
+ LOGE("VIDIOC_QBUF rc = %d\n", rc);
+ break;
+ }
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 1;
+ } else {
+ /* the buf is held by upper layer, will not queue into kernel.
+ * add buf reference count */
+ my_obj->buf_status[i].buf_refcnt = 1;
+ my_obj->buf_status[i].in_kernel = 0;
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unreg_buf
+ *
+ * DESCRIPTION: unregister all stream buffers from kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+ struct v4l2_requestbuffers bufreq;
+ int32_t i, rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ /* unreg buf to kernel */
+ bufreq.count = 0;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ LOGE("fd=%d, VIDIOC_REQBUFS failed, rc=%d, errno %d",
+ my_obj->fd, rc, errno);
+ }
+
+ /* reset buf reference count */
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for(i = 0; i < my_obj->buf_num; i++){
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 0;
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_get_v4l2_fmt
+ *
+ * DESCRIPTION: translate camera image format into FOURCC code
+ *
+ * PARAMETERS :
+ * @fmt : camera image format
+ *
+ * RETURN : FOURCC code for image format
+ *==========================================================================*/
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt)
+{
+ uint32_t val = 0;
+ switch(fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+ val = V4L2_PIX_FMT_NV12;
+ break;
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+ val = V4L2_PIX_FMT_NV21;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+ val= V4L2_PIX_FMT_SBGGR10;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+ val = V4L2_PIX_FMT_SBGGR12;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+ val= V4L2_PIX_FMT_SGBRG14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+ val= V4L2_PIX_FMT_SGRBG14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+ val= V4L2_PIX_FMT_SRGGB14;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+ val = V4L2_PIX_FMT_SBGGR14;
+ break;
+ case CAM_FORMAT_YUV_422_NV61:
+ val= V4L2_PIX_FMT_NV61;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+ val= V4L2_PIX_FMT_YUYV;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+ val= V4L2_PIX_FMT_YVYU;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+ val= V4L2_PIX_FMT_UYVY;
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+ val= V4L2_PIX_FMT_VYUY;
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ val= V4L2_PIX_FMT_NV12;
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ val= V4L2_PIX_FMT_NV16;
+ break;
+ case CAM_FORMAT_Y_ONLY:
+ val= V4L2_PIX_FMT_GREY;
+ break;
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ val= V4L2_PIX_FMT_Y10;
+ break;
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ val= V4L2_PIX_FMT_Y12;
+ break;
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* No v4l2 format is defined yet for CAM_FORMAT_Y_ONLY_14_BPP */
+ /* val= V4L2_PIX_FMT_Y14; */
+ val = 0;
+ LOGE("Unknown fmt=%d", fmt);
+ break;
+ case CAM_FORMAT_MAX:
+ /* CAM_STREAM_TYPE_DEFAULT,
+ * CAM_STREAM_TYPE_OFFLINE_PROC,
+ * and CAM_STREAM_TYPE_METADATA
+ * set fmt to CAM_FORMAT_MAX*/
+ val = 0;
+ break;
+ default:
+ val = 0;
+ LOGE("Unknown fmt=%d", fmt);
+ break;
+ }
+ LOGD("fmt=%d, val =%d", fmt, val);
+ return val;
+}
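+
+/* Illustrative usage (not part of this HAL build):
+ *
+ *     uint32_t fourcc = mm_stream_get_v4l2_fmt(CAM_FORMAT_YUV_420_NV21);
+ *     // fourcc == V4L2_PIX_FMT_NV21; a return value of 0 means the format
+ *     // has no v4l2 equivalent and should be treated as an error.
+ */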
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_preview
+ *
+ * DESCRIPTION: calculate preview frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ *   @stream_info : stream information (format and stream type)
+ *   @dim         : image dimension
+ *   @padding     : padding information
+ *   @buf_planes  : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+
+ uint32_t width_padding = 0;
+ uint32_t height_padding = 0;
+
+ switch (stream_info->fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ width_padding = padding->width_padding;
+ height_padding = CAM_PAD_TO_2;
+ } else {
+ width_padding = padding->width_padding;
+ height_padding = padding->height_padding;
+ }
+
+ stride = PAD_TO_SIZE(dim->width, width_padding);
+ scanline = PAD_TO_SIZE(dim->height, height_padding);
+
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width, width_padding);
+ scanline = PAD_TO_SIZE(dim->height / 2, height_padding);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset = 0;
+ buf_planes->plane_info.mp[2].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[2].offset_x = 0;
+ buf_planes->plane_info.mp[2].offset_y = 0;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = dim->height;
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0,meta_scanline = 0;
+ // using UBWC
+ if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ } else {
+ stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+ scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+ }
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+
+ default:
+ LOGE("Invalid cam_format for preview %d",
+ stream_info->fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
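+
+/* Illustrative worked example (hypothetical padding, not part of this HAL
+ * build): for a 1920x1080 NV21 preview stream with width_padding assumed to
+ * be CAM_PAD_TO_32 and the CAM_PAD_TO_2 height padding used above,
+ *
+ *     Y  plane : stride 1920, scanline 1080 -> len = 2073600
+ *     UV plane : stride 1920, scanline  540 -> len = 1036800
+ *     frame_len = PAD_TO_SIZE(3110400, CAM_PAD_TO_4K) = 3112960
+ */
+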
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_post_view
+ *
+ * DESCRIPTION: calculate postview frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_64);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_64);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV21_ADRENO:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+ scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset = 0;
+ buf_planes->plane_info.mp[2].len =
+ (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[2].offset_x = 0;
+ buf_planes->plane_info.mp[2].offset_y = 0;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ scanline = dim->height;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0,meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+        LOGE("Invalid cam_format for postview %d",
+                fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
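+
+/* Illustrative worked example (hypothetical dimensions, not part of this HAL
+ * build): for a 1920x1080 NV21 postview buffer with the CAM_PAD_TO_64 padding
+ * used above,
+ *
+ *     Y  plane : stride 1920, scanline 1088 -> len = 2088960
+ *     UV plane : stride 1920, scanline  576 -> len = 1105920
+ *     frame_len = PAD_TO_SIZE(3194880, CAM_PAD_TO_4K) = 3194880
+ */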
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_snapshot
+ *
+ * DESCRIPTION: calculate snapshot/postproc frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ uint8_t isAFamily = mm_camera_util_chip_is_a_family();
+ int offset_x = 0, offset_y = 0;
+ int stride = 0, scanline = 0;
+
+ if (isAFamily) {
+ stride = dim->width;
+ scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_16);
+ offset_x = 0;
+ offset_y = scanline - dim->height;
+ scanline += offset_y; /* double padding */
+ } else {
+ offset_x = PAD_TO_SIZE(padding->offset_info.offset_x,
+ padding->plane_padding);
+ offset_y = PAD_TO_SIZE(padding->offset_info.offset_y,
+ padding->plane_padding);
+ stride = PAD_TO_SIZE((dim->width +
+ (2 * offset_x)), padding->width_padding);
+ scanline = PAD_TO_SIZE((dim->height +
+ (2 * offset_y)), padding->height_padding);
+ }
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline/2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].offset_x = offset_x;
+ buf_planes->plane_info.mp[2].offset_y = offset_y;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+ buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0, meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
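+ /* Each UBWC plane is a compression-metadata block followed by the
+  * compressed pixel data, both 4K aligned; the total frame length comes
+  * straight from the Venus buffer-size macro. */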
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
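+ /* The VENUS_* macros (from the msm_media_info kernel header) give the
+  * stride/scanline alignment expected by the Venus video core; the UV
+  * plane gets whatever remains of the total buffer after the Y plane. */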
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGE("Invalid cam_format for snapshot %d",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_raw
+ *
+ * DESCRIPTION: calculate raw frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+
+ if ((NULL == dim) || (NULL == padding) || (NULL == buf_planes)) {
+ return -1;
+ }
+
+ int32_t stride = PAD_TO_SIZE(dim->width, (int32_t)padding->width_padding);
+ int32_t stride_in_bytes = stride;
+ int32_t scanline = PAD_TO_SIZE(dim->height, (int32_t)padding->height_padding);
+
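+ /* stride is tracked in pixels and stride_in_bytes in bytes; each case
+  * below adjusts both for the bit packing of its format, and the packed
+  * single-plane formats report the whole buffer as one line of height 1. */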
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV21:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x = 0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x = 0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].stride_in_bytes = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+ case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+ case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+ case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+ case CAM_FORMAT_JPEG_RAW_8BIT:
+ /* 1 plane */
+ /* Every 16 pixels occupy 16 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width =
+ (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_META_RAW_8BIT:
+ // Every 16 pixels occupy 16 bytes
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ break;
+
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR:
+ /* 1 plane */
+ /* Every 16 pixels occupy 16 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY:
+ /* Every 12 pixels occupy 16 bytes */
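+ /* QCOM 10-bit packing stores 3 pixels in every 4 bytes (6 pixels per
+  * 8 bytes), hence the 8/6 bytes-per-pixel ratio below. */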
+ stride = (dim->width + 11)/12 * 12;
+ stride_in_bytes = stride * 8 / 6;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY:
+ /* Every 10 pixels occupy 16 bytes */
+ stride = (dim->width + 9)/10 * 10;
+ stride_in_bytes = stride * 8 / 5;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY:
+ /* Every 64 pixels occupy 80 bytes */
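+ /* Standard MIPI RAW10 packing: 4 pixels in 5 bytes, so the byte stride
+  * is width * 5 / 4, rounded up to an 8-byte boundary. */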
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_4);
+ stride_in_bytes = PAD_TO_SIZE(stride * 5 / 4, CAM_PAD_TO_8);
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY:
+ /* Every 32 pixels occupy 48 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+ stride_in_bytes = stride * 3 / 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR:
+ /* Every 8 pixels occupy 16 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_8);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY:
+ /* Every 64 pixels occupy 112 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+ stride_in_bytes = stride * 7 / 4;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR:
+ case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY:
+ /* Every 16 pixels occupy 32 bytes */
+ stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+ stride_in_bytes = stride * 2;
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[0].height = 1;
+ break;
+ default:
+ LOGE("Invalid cam_format %d for raw stream",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_video
+ *
+ * DESCRIPTION: calculate video frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+ cam_dimension_t *dim, cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int stride = 0, scanline = 0;
+ int meta_stride = 0, meta_scanline = 0;
+
+
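+ /* For plain NV12 video each plane length is padded to 2K and the whole
+  * frame to 4K; the Venus and UBWC variants instead take their geometry
+  * from the VENUS_* macros. */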
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ buf_planes->plane_info.num_planes = 2;
+
+ stride = dim->width;
+ scanline = dim->height;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ CAM_PAD_TO_2K);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = dim->width;
+ scanline = dim->height / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ CAM_PAD_TO_2K);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len -
+ buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGD("Video format VENUS is not supported = %d",
+ fmt);
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+
+#else
+ LOGD("Video format UBWC is not supported = %d",
+ fmt);
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGD("Invalid Video Format = %d", fmt);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_metadata
+ *
+ * DESCRIPTION: calculate metadata frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
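+ /* Metadata is treated as a single-plane blob whose size in bytes is
+  * width * height, padded only by the generic plane padding. */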
+ buf_planes->plane_info.num_planes = 1;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(dim->width * dim->height),
+ padding->plane_padding);
+ buf_planes->plane_info.frame_len =
+ buf_planes->plane_info.mp[0].len;
+
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = dim->width;
+ buf_planes->plane_info.mp[0].scanline = dim->height;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_analysis
+ *
+ * DESCRIPTION: calculate analysis frame offset based on format and
+ * padding information
+ *
+ * PARAMETERS :
+ * @fmt : image format
+ * @dim : image dimension
+ * @padding : padding information
+ * @buf_planes : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_analysis(cam_format_t fmt,
+ cam_dimension_t *dim,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *buf_planes)
+{
+ int32_t rc = 0;
+ int32_t offset_x = 0, offset_y = 0;
+ int32_t stride, scanline;
+
+ /* Clip to minimum supported bytes per line */
+ if ((uint32_t)dim->width < padding->min_stride) {
+ stride = (int32_t)padding->min_stride;
+ } else {
+ stride = dim->width;
+ }
+
+ if ((uint32_t)dim->height < padding->min_scanline) {
+ scanline = (int32_t)padding->min_scanline;
+ } else {
+ scanline = dim->height;
+ }
+
+ stride = PAD_TO_SIZE(stride, padding->width_padding);
+ scanline = PAD_TO_SIZE(scanline, padding->height_padding);
+
+ switch (fmt) {
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV21:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_YV12:
+ /* 3 planes: Y + Cr + Cb */
+ buf_planes->plane_info.num_planes = 3;
+
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+ scanline = scanline / 2;
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width / 2;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+ buf_planes->plane_info.mp[2].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[2].offset_x = offset_x;
+ buf_planes->plane_info.mp[2].offset_y = offset_y;
+ buf_planes->plane_info.mp[2].stride = stride;
+ buf_planes->plane_info.mp[2].scanline = scanline;
+ buf_planes->plane_info.mp[2].width = dim->width / 2;
+ buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+ buf_planes->plane_info.mp[1].len +
+ buf_planes->plane_info.mp[2].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_422_NV16:
+ case CAM_FORMAT_YUV_422_NV61:
+ /* 2 planes: Y + CbCr */
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+
+ buf_planes->plane_info.mp[1].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[1].offset_x = offset_x;
+ buf_planes->plane_info.mp[1].offset_y = offset_y;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height;
+
+ buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+ buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+ CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_Y_ONLY:
+ case CAM_FORMAT_Y_ONLY_10_BPP:
+ case CAM_FORMAT_Y_ONLY_12_BPP:
+ case CAM_FORMAT_Y_ONLY_14_BPP:
+ buf_planes->plane_info.num_planes = 1;
+
+ buf_planes->plane_info.mp[0].len =
+ PAD_TO_SIZE((uint32_t)(stride * scanline),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset =
+ PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+ padding->plane_padding);
+ buf_planes->plane_info.mp[0].offset_x = offset_x;
+ buf_planes->plane_info.mp[0].offset_y = offset_y;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.frame_len =
+ PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+ break;
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+ // using Venus
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+ LOGE("Venus hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+ {
+ int meta_stride = 0, meta_scanline = 0;
+ // using UBWC
+ stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+ buf_planes->plane_info.frame_len =
+ VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+ buf_planes->plane_info.num_planes = 2;
+ buf_planes->plane_info.mp[0].offset = 0;
+ buf_planes->plane_info.mp[0].offset_x =0;
+ buf_planes->plane_info.mp[0].offset_y = 0;
+ buf_planes->plane_info.mp[0].stride = stride;
+ buf_planes->plane_info.mp[0].scanline = scanline;
+ buf_planes->plane_info.mp[0].width = dim->width;
+ buf_planes->plane_info.mp[0].height = dim->height;
+ buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[0].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[0].len =
+ (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+ (buf_planes->plane_info.mp[0].meta_len));
+
+ stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+ meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+ buf_planes->plane_info.mp[1].offset = 0;
+ buf_planes->plane_info.mp[1].offset_x =0;
+ buf_planes->plane_info.mp[1].offset_y = 0;
+ buf_planes->plane_info.mp[1].stride = stride;
+ buf_planes->plane_info.mp[1].scanline = scanline;
+ buf_planes->plane_info.mp[1].width = dim->width;
+ buf_planes->plane_info.mp[1].height = dim->height/2;
+ buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+ buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+ buf_planes->plane_info.mp[1].meta_len =
+ MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+ buf_planes->plane_info.mp[1].len =
+ buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+ }
+#else
+ LOGE("UBWC hardware not avail, cannot use this format");
+ rc = -1;
+#endif
+ break;
+ default:
+ LOGE("Invalid cam_format for anlysis %d",
+ fmt);
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset_postproc
+ *
+ * DESCRIPTION: calculate postprocess frame offset
+ *
+ * PARAMETERS :
+ * @stream_info: ptr to stream info
+ * @padding : padding information
+ * @plns : [out] buffer plane information
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+ cam_padding_info_t *padding,
+ cam_stream_buf_plane_info_t *plns)
+{
+ int32_t rc = 0;
+ cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+ if (stream_info->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+ type = stream_info->reprocess_config.offline.input_type;
+ if (CAM_STREAM_TYPE_DEFAULT == type) {
+ if (plns->plane_info.frame_len == 0) {
+ // take offset from input source
+ *plns = stream_info->reprocess_config.offline.input_buf_planes;
+ return rc;
+ }
+ } else {
+ type = stream_info->reprocess_config.offline.input_type;
+ }
+ } else {
+ type = stream_info->reprocess_config.online.input_stream_type;
+ }
+
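+ /* A reprocess stream reuses the buffer layout of the stream type that
+  * feeds it: online reprocess follows the input stream type, while offline
+  * reprocess either copies the caller-supplied input plane info or falls
+  * back to the input type's own offset calculation. */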
+ switch (type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ rc = mm_stream_calc_offset_preview(stream_info,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ rc = mm_stream_calc_offset_post_view(stream_info->fmt,
+ &stream_info->dim,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_CALLBACK:
+ rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ rc = mm_stream_calc_offset_video(stream_info->fmt,
+ &stream_info->dim, plns);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ rc = mm_stream_calc_offset_raw(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ rc = mm_stream_calc_offset_analysis(stream_info->fmt,
+ &stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ rc = mm_stream_calc_offset_metadata(&stream_info->dim,
+ padding,
+ plns);
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+ &stream_info->dim, padding, plns);
+ break;
+ default:
+ LOGE("not supported for stream type %d",
+ type);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
+
+/*===========================================================================
+* FUNCTION : mm_stream_calc_lcm
+*
+* DESCRIPTION: calculate LCM of two numbers
+*
+* PARAMETERS :
+* @num1 : number 1
+* @num2 : number 2
+*
+* RETURN : uint32_t type
+*
+*===========================================================================*/
+uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2)
+{
+ uint32_t lcm = 0;
+ uint32_t temp = 0;
+
+ if ((num1 < 1) && (num2 < 1)) {
+ return 0;
+ } else if (num1 < 1) {
+ return num2;
+ } else if (num2 < 1) {
+ return num1;
+ }
+
+ if (num1 > num2) {
+ lcm = num1;
+ } else {
+ lcm = num2;
+ }
+ temp = lcm;
+
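+ /* Starting from the larger input, keep adding it until the running value
+  * divides evenly by both numbers, e.g. lcm(16, 24): 24 -> 48. */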
+ while (1) {
+ if (((lcm%num1) == 0) && ((lcm%num2) == 0)) {
+ break;
+ }
+ lcm += temp;
+ }
+ return lcm;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_calc_offset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+
+ cam_dimension_t dim = my_obj->stream_info->dim;
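+ /* Swap width/height when the stream content is rotated by 90/270 degrees.
+  * Video streams are left untouched here, presumably because their rotation
+  * is handled downstream rather than in the buffer layout. */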
+ if (my_obj->stream_info->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION &&
+ my_obj->stream_info->stream_type != CAM_STREAM_TYPE_VIDEO) {
+ if (my_obj->stream_info->pp_config.rotation == ROTATE_90 ||
+ my_obj->stream_info->pp_config.rotation == ROTATE_270) {
+ // rotated by 90 or 270, need to switch width and height
+ dim.width = my_obj->stream_info->dim.height;
+ dim.height = my_obj->stream_info->dim.width;
+ }
+ }
+
+ switch (my_obj->stream_info->stream_type) {
+ case CAM_STREAM_TYPE_PREVIEW:
+ rc = mm_stream_calc_offset_preview(my_obj->stream_info,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_POSTVIEW:
+ rc = mm_stream_calc_offset_post_view(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_SNAPSHOT:
+ case CAM_STREAM_TYPE_CALLBACK:
+ rc = mm_stream_calc_offset_snapshot(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_OFFLINE_PROC:
+ rc = mm_stream_calc_offset_postproc(my_obj->stream_info,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_VIDEO:
+ rc = mm_stream_calc_offset_video(my_obj->stream_info->fmt,
+ &dim, &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_RAW:
+ rc = mm_stream_calc_offset_raw(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_ANALYSIS:
+ rc = mm_stream_calc_offset_analysis(my_obj->stream_info->fmt,
+ &dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ case CAM_STREAM_TYPE_METADATA:
+ rc = mm_stream_calc_offset_metadata(&dim,
+ &my_obj->padding_info,
+ &my_obj->stream_info->buf_planes);
+ break;
+ default:
+ LOGE("not supported for stream type %d",
+ my_obj->stream_info->stream_type);
+ rc = -1;
+ break;
+ }
+
+ my_obj->frame_offset = my_obj->stream_info->buf_planes.plane_info;
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_sync_info
+ *
+ * DESCRIPTION: synchronize stream information with server
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ * NOTE : assumes the stream info buffer is mapped to the server and filled
+ * in with stream information by the upper layer. This call lets the
+ * server synchronize the stream information with the HAL. If the
+ * server finds any fields that need to be changed according to the
+ * hardware configuration, it modifies those fields so that the HAL
+ * knows about them.
+ *==========================================================================*/
+int32_t mm_stream_sync_info(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ int32_t value = 0;
+ my_obj->stream_info->stream_svr_id = my_obj->server_stream_id;
+ rc = mm_stream_calc_offset(my_obj);
+
+ if (rc == 0) {
+ rc = mm_camera_util_s_ctrl(my_obj->fd,
+ CAM_PRIV_STREAM_INFO_SYNC,
+ &value);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_set_fmt
+ *
+ * DESCRIPTION: set stream format to kernel via v4l2 ioctl
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_format fmt;
+ struct msm_v4l2_format_data msm_fmt;
+ int i;
+
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ if (my_obj->stream_info->dim.width == 0 ||
+ my_obj->stream_info->dim.height == 0) {
+ LOGE("invalid input[w=%d,h=%d,fmt=%d]\n",
+ my_obj->stream_info->dim.width,
+ my_obj->stream_info->dim.height,
+ my_obj->stream_info->fmt);
+ return -1;
+ }
+
+ memset(&fmt, 0, sizeof(fmt));
+ memset(&msm_fmt, 0, sizeof(msm_fmt));
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ msm_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+
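+ /* The MSM camera driver expects its own msm_v4l2_format_data struct,
+  * packed into the raw_data area of the regular v4l2_format that is
+  * handed to VIDIOC_S_FMT below. */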
+ msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;
+ msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;
+ msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);
+
+ if (my_obj->stream_info->streaming_mode != CAM_STREAMING_MODE_BATCH) {
+ msm_fmt.num_planes = (unsigned char)my_obj->frame_offset.num_planes;
+ for (i = 0; i < msm_fmt.num_planes; i++) {
+ msm_fmt.plane_sizes[i] = my_obj->frame_offset.mp[i].len;
+ }
+ } else {
+ msm_fmt.num_planes = 1;
+ msm_fmt.plane_sizes[0] = my_obj->stream_info->user_buf_info.size;
+ }
+
+ memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
+ rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
+ if (rc < 0) {
+ LOGE("ioctl failed %d, errno %d", rc, errno);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_buf_done
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @frame : frame to be enqueued back to kernel
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
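+ /* Buffers are reference counted so multiple consumers can hold the same
+  * frame; only when the last reference is released is the buffer queued
+  * back to the kernel (batch streams go through the user-buf write path
+  * instead). */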
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+ LOGE("Error Trying to free second time?(idx=%d) count=%d\n",
+ frame->buf_idx,
+ my_obj->buf_status[frame->buf_idx].buf_refcnt);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ rc = -1;
+ return rc;
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+ rc = mm_stream_write_user_buf(my_obj, frame);
+ } else {
+ pthread_mutex_lock(&my_obj->buf_lock);
+ my_obj->buf_status[frame->buf_idx].buf_refcnt--;
+ if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ LOGD("<DEBUG> : Buf done for buffer:%d, stream:%d", frame->buf_idx, frame->stream_type);
+ rc = mm_stream_qbuf(my_obj, frame);
+ if(rc < 0) {
+ LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ frame->buf_idx, rc);
+ } else {
+ my_obj->buf_status[frame->buf_idx].in_kernel = 1;
+ }
+ } else {
+ LOGD("<DEBUG> : Still ref count pending count :%d",
+ my_obj->buf_status[frame->buf_idx].buf_refcnt);
+ LOGD("<DEBUG> : for buffer:%p:%d",
+ my_obj, frame->buf_idx);
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ }
+ }
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION : mm_stream_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ *
+ * RETURN : queued buffer count
+ *==========================================================================*/
+int32_t mm_stream_get_queued_buf_count(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+ pthread_mutex_lock(&my_obj->buf_lock);
+ rc = my_obj->queued_buffer_count;
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_stream_reg_buf_cb
+ *
+ * DESCRIPTION: Allow another stream to register a data callback on this stream.
+ *
+ * PARAMETERS :
+ * @my_obj : stream object
+ * @val : callback function to be registered
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+ mm_stream_data_cb_t val)
+{
+ int32_t rc = -1;
+ uint8_t i;
+ LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+ my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL == my_obj->buf_cb[i].cb) {
+ my_obj->buf_cb[i] = val;
+ rc = 0;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..0c740b4
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,698 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+typedef enum {
+ /* poll entries updated */
+ MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
+ /* poll entries updated asynchronous */
+ MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC,
+ /* commit updates */
+ MM_CAMERA_PIPE_CMD_COMMIT,
+ /* exit */
+ MM_CAMERA_PIPE_CMD_EXIT,
+ /* max count */
+ MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TASK_STATE_STOPPED,
+ MM_CAMERA_POLL_TASK_STATE_POLL, /* poll thread is in the polling state. */
+ MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+ uint32_t cmd;
+ mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig_async
+ *
+ * DESCRIPTION: Asynchronous call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @cmd : command to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig_async(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+
+ LOGD("E cmd = %d",cmd);
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = FALSE;
+
+ /* send cmd to worker */
+ ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if (len < 1) {
+ LOGW("len = %lld, errno = %d",
+ (long long int)len, errno);
+ /* Avoid waiting for the signal */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return 0;
+ }
+ LOGD("begin IN mutex write done, len = %lld",
+ (long long int)len);
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("X");
+ return 0;
+}
+
+
+
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig
+ *
+ * DESCRIPTION: Synchronized call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @cmd : command to be sent
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+
+ LOGD("E cmd = %d",cmd);
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = FALSE;
+ /* send cmd to worker */
+
+ ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if(len < 1) {
+ LOGW("len = %lld, errno = %d",
+ (long long int)len, errno);
+ /* Avoid waiting for the signal */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return 0;
+ }
+ LOGD("begin IN mutex write done, len = %lld",
+ (long long int)len);
+ /* wait till worker task gives positive signal */
+ if (FALSE == poll_cb->status) {
+ LOGD("wait");
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+ /* done */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("X");
+ return 0;
+}
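+
+/* Usage sketch (illustrative only): the two signalling helpers above differ
+ * only in whether the caller blocks. The command values are just examples.
+ *
+ *   // asynchronous: write the command into the pipe and return immediately
+ *   mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC);
+ *
+ *   // synchronous: write the command, then wait on cond_v until the poll
+ *   // thread acknowledges it via mm_camera_poll_sig_done()
+ *   mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ */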
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_sig_done
+ *
+ * DESCRIPTION: signal that the pending pipe command has been processed
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = TRUE;
+ pthread_cond_signal(&poll_cb->cond_v);
+ LOGD("done, in mutex");
+ pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_set_state
+ *
+ * DESCRIPTION: set a polling state
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @state : polling state (stopped/polling)
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+ mm_camera_poll_task_state_type_t state)
+{
+ poll_cb->state = state;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_proc_pipe
+ *
+ * DESCRIPTION: polling thread routine to process pipe
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+ ssize_t read_len;
+ int i;
+ mm_camera_sig_evt_t cmd_evt;
+ read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+ LOGD("read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+ poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+ switch (cmd_evt.cmd) {
+ case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+ case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
+ /* we always have index 0 for pipe read */
+ poll_cb->num_fds = 0;
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+
+ if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type &&
+ poll_cb->num_fds < MAX_STREAM_NUM_IN_BUNDLE) {
+ if (poll_cb->poll_entries[0].fd >= 0) {
+ /* fd is valid, we update poll_fds */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ }
+ } else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type &&
+ poll_cb->num_fds <= MAX_STREAM_NUM_IN_BUNDLE) {
+ for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+ if(poll_cb->poll_entries[i].fd >= 0) {
+ /* fd is valid, we update poll_fds to this fd */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ } else {
+ /* fd is invalid, so set the entry to -1 to prevent polling.
+ * Per the poll() spec, entries with fd = -1 are ignored.
+ * If that were not the case, we would have to skip these invalid fds
+ * when updating this array.
+ * We keep the fd = -1 placeholder so that the callback for an fd can be
+ * found directly at poll_entries[index-1] (index 0 is reserved for the
+ * pipe read fd, hence the offset of 1). */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+ poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+ poll_cb->num_fds++;
+ }
+ }
+ }
+ if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+
+ case MM_CAMERA_PIPE_CMD_COMMIT:
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ case MM_CAMERA_PIPE_CMD_EXIT:
+ default:
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ }
+}
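+
+/* Illustrative layout (example only): how poll_fds[] is rebuilt above for a
+ * data poll thread with two registered entries, the second one invalid.
+ *
+ *   poll_fds[0] = pfds[0]              // pipe read fd, always at index 0
+ *   poll_fds[1] = poll_entries[0].fd   // valid stream fd
+ *   poll_fds[2] = -1                   // invalid entry kept as placeholder so
+ *                                      // the callback is still poll_entries[i-1]
+ */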
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_fn
+ *
+ * DESCRIPTION: polling thread routine
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+ int rc = 0, i;
+
+ if (NULL == poll_cb) {
+ LOGE("poll_cb is NULL!\n");
+ return NULL;
+ }
+ LOGD("poll type = %d, num_fd = %d poll_cb = %p\n",
+ poll_cb->poll_type, poll_cb->num_fds,poll_cb);
+ do {
+ for(i = 0; i < poll_cb->num_fds; i++) {
+ poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+ }
+
+ rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
+ if(rc > 0) {
+ if ((poll_cb->poll_fds[0].revents & POLLIN) &&
+ (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
+ /* if we have data on pipe, we only process pipe in this iteration */
+ LOGD("cmd received on pipe\n");
+ mm_camera_poll_proc_pipe(poll_cb);
+ } else {
+ for(i=1; i<poll_cb->num_fds; i++) {
+ /* Checking for ctrl events */
+ if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+ (poll_cb->poll_fds[i].revents & POLLPRI)) {
+ LOGD("mm_camera_evt_notify\n");
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+
+ if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
+ (poll_cb->poll_fds[i].revents & POLLIN) &&
+ (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
+ LOGD("mm_stream_data_notify\n");
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+ }
+ }
+ } else {
+ /* on error, sleep 10 us (hard-coded) and then continue */
+ usleep(10);
+ continue;
+ }
+ } while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread
+ *
+ * DESCRIPTION: polling thread entry function
+ *
+ * PARAMETERS :
+ * @data : ptr to poll thread object
+ *
+ * RETURN : none
+ *==========================================================================*/
+static void *mm_camera_poll_thread(void *data)
+{
+ mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
+
+ mm_camera_cmd_thread_name(poll_cb->threadName);
+ /* add pipe read fd into poll first */
+ poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
+
+ mm_camera_poll_sig_done(poll_cb);
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
+ return mm_camera_poll_fn(poll_cb);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_notify_entries_updated
+ *
+ * DESCRIPTION: notify the polling thread that the entries for the polling
+ *              fds have been updated
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : int32_t type of status
+ *              0 -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
+{
+ /* send poll entries updated signal to poll thread */
+ return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_commit_updates
+ *
+ * DESCRIPTION: sync with all previously pending async updates
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_commit_updates(mm_camera_poll_thread_t * poll_cb)
+{
+ return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT);
+}
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_add_poll_fd
+ *
+ * DESCRIPTION: add a new fd into polling thread
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @handler : stream handle if channel data polling thread,
+ * 0 if event polling thread
+ * @fd : file descriptor need to be added into polling thread
+ * @notify_cb : callback function to handle if any notify from fd
+ * @userdata : user data ptr
+ * @call_type : whether this is a synchronous or an asynchronous call
+ *
+ * RETURN : int32_t type of status
+ *              0 -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ int32_t fd,
+ mm_camera_poll_notify_t notify_cb,
+ void* userdata,
+ mm_camera_call_type_t call_type)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if (MAX_STREAM_NUM_IN_BUNDLE > idx) {
+ poll_cb->poll_entries[idx].fd = fd;
+ poll_cb->poll_entries[idx].handler = handler;
+ poll_cb->poll_entries[idx].notify_cb = notify_cb;
+ poll_cb->poll_entries[idx].user_data = userdata;
+ /* send poll entries updated signal to poll thread */
+ if (call_type == mm_camera_sync_call ) {
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+ }
+ } else {
+ LOGE("invalid handler %d (%d)", handler, idx);
+ }
+ return rc;
+}
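+
+/* Usage sketch (illustrative only; the poll thread object, handle, fd and
+ * callback names are placeholders, not taken from this file):
+ *
+ *   rc = mm_camera_poll_thread_add_poll_fd(poll_cb,
+ *                                          stream_hdl,
+ *                                          stream_fd,
+ *                                          my_data_notify_cb,
+ *                                          (void *)my_obj,
+ *                                          mm_camera_sync_call);
+ */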
+
+/*===========================================================================
+ * FUNCTION : mm_camera_poll_thread_del_poll_fd
+ *
+ * DESCRIPTION: delete a fd from polling thread
+ *
+ * PARAMETERS :
+ * @poll_cb : ptr to poll thread object
+ * @handler : stream handle if channel data polling thread,
+ *            0 if event polling thread
+ * @call_type : whether this is a synchronous or an asynchronous call
+ *
+ * RETURN : int32_t type of status
+ * 0 -- success
+ * -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ mm_camera_call_type_t call_type)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if ((MAX_STREAM_NUM_IN_BUNDLE > idx) &&
+ (handler == poll_cb->poll_entries[idx].handler)) {
+ /* reset poll entry */
+ poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+ poll_cb->poll_entries[idx].handler = 0;
+ poll_cb->poll_entries[idx].notify_cb = NULL;
+
+ /* send poll entries updated signal to poll thread */
+ if (call_type == mm_camera_sync_call ) {
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+ }
+ } else {
+ if ((MAX_STREAM_NUM_IN_BUNDLE <= idx) ||
+ (poll_cb->poll_entries[idx].handler != 0)) {
+ LOGE("invalid handler %d (%d)", poll_cb->poll_entries[idx].handler,
+ idx);
+ rc = -1;
+ } else {
+ LOGW("invalid handler %d (%d)", handler, idx);
+ rc = 0;
+ }
+ }
+
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+ mm_camera_poll_thread_type_t poll_type)
+{
+ int32_t rc = 0;
+ size_t i = 0, cnt = 0;
+ poll_cb->poll_type = poll_type;
+
+ //Initialize poll_fds
+ cnt = sizeof(poll_cb->poll_fds) / sizeof(poll_cb->poll_fds[0]);
+ for (i = 0; i < cnt; i++) {
+ poll_cb->poll_fds[i].fd = -1;
+ }
+ //Initialize poll_entries
+ cnt = sizeof(poll_cb->poll_entries) / sizeof(poll_cb->poll_entries[0]);
+ for (i = 0; i < cnt; i++) {
+ poll_cb->poll_entries[i].fd = -1;
+ }
+ //Initialize pipe fds
+ poll_cb->pfds[0] = -1;
+ poll_cb->pfds[1] = -1;
+ rc = pipe(poll_cb->pfds);
+ if(rc < 0) {
+ LOGE("pipe open rc=%d\n", rc);
+ return -1;
+ }
+
+ poll_cb->timeoutms = -1; /* infinite timeout */
+
+ LOGD("poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+ poll_cb->poll_type,
+ poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+ pthread_mutex_init(&poll_cb->mutex, NULL);
+ pthread_cond_init(&poll_cb->cond_v, NULL);
+
+ /* launch the thread */
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = 0;
+ pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+ if(!poll_cb->status) {
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+
+ pthread_mutex_unlock(&poll_cb->mutex);
+ LOGD("End");
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+ int32_t rc = 0;
+ if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+ LOGE("err, poll thread is not running.\n");
+ return rc;
+ }
+
+ /* send exit signal to poll thread */
+ mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+ /* wait until poll thread exits */
+ if (pthread_join(poll_cb->pid, NULL) != 0) {
+ LOGD("pthread dead already\n");
+ }
+
+ /* close pipe */
+ if(poll_cb->pfds[0] >= 0) {
+ close(poll_cb->pfds[0]);
+ }
+ if(poll_cb->pfds[1] >= 0) {
+ close(poll_cb->pfds[1]);
+ }
+
+ pthread_mutex_destroy(&poll_cb->mutex);
+ pthread_cond_destroy(&poll_cb->cond_v);
+ memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+ poll_cb->pfds[0] = -1;
+ poll_cb->pfds[1] = -1;
+ return rc;
+}
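+
+/* Typical lifecycle (illustrative only; poll_cb, handler, fd, cb and userdata
+ * are placeholders):
+ *
+ *   mm_camera_poll_thread_launch(poll_cb, MM_CAMERA_POLL_TYPE_DATA);
+ *   mm_camera_poll_thread_add_poll_fd(poll_cb, handler, fd, cb, userdata,
+ *                                     mm_camera_sync_call);
+ *   // ... cb() fires whenever fd becomes readable ...
+ *   mm_camera_poll_thread_del_poll_fd(poll_cb, handler, mm_camera_sync_call);
+ *   mm_camera_poll_thread_release(poll_cb);
+ */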
+
+static void *mm_camera_cmd_thread(void *data)
+{
+ int running = 1;
+ int ret;
+ mm_camera_cmd_thread_t *cmd_thread =
+ (mm_camera_cmd_thread_t *)data;
+ mm_camera_cmdcb_t* node = NULL;
+
+ mm_camera_cmd_thread_name(cmd_thread->threadName);
+ do {
+ do {
+ ret = cam_sem_wait(&cmd_thread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ /* we got notified about new cmd avail in cmd queue */
+ node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+ while (node != NULL) {
+ switch (node->cmd_type) {
+ case MM_CAMERA_CMD_TYPE_EVT_CB:
+ case MM_CAMERA_CMD_TYPE_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
+ case MM_CAMERA_CMD_TYPE_START_ZSL:
+ case MM_CAMERA_CMD_TYPE_STOP_ZSL:
+ case MM_CAMERA_CMD_TYPE_GENERAL:
+ case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
+ if (NULL != cmd_thread->cb) {
+ cmd_thread->cb(node, cmd_thread->user_data);
+ }
+ break;
+ case MM_CAMERA_CMD_TYPE_EXIT:
+ default:
+ running = 0;
+ break;
+ }
+ free(node);
+ node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+ } /* (node != NULL) */
+ } while (running);
+ return NULL;
+}
+
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+ mm_camera_cmd_cb_t cb,
+ void* user_data)
+{
+ int32_t rc = 0;
+
+ cam_sem_init(&cmd_thread->cmd_sem, 0);
+ cam_sem_init(&cmd_thread->sync_sem, 0);
+ cam_queue_init(&cmd_thread->cmd_queue);
+ cmd_thread->cb = cb;
+ cmd_thread->user_data = user_data;
+ cmd_thread->is_active = TRUE;
+
+ /* launch the thread */
+ pthread_create(&cmd_thread->cmd_pid,
+ NULL,
+ mm_camera_cmd_thread,
+ (void *)cmd_thread);
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_name(const char* name)
+{
+ int32_t rc = 0;
+ /* name the thread */
+ if (name && strlen(name))
+ prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+ return rc;
+}
+
+
+int32_t mm_camera_cmd_thread_stop(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_camera_cmdcb_t");
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+ cam_queue_enq(&cmd_thread->cmd_queue, node);
+ cam_sem_post(&cmd_thread->cmd_sem);
+
+ /* wait until cmd thread exits */
+ if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+ LOGD("pthread dead already\n");
+ }
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_destroy(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ cam_queue_deinit(&cmd_thread->cmd_queue);
+ cam_sem_destroy(&cmd_thread->cmd_sem);
+ cam_sem_destroy(&cmd_thread->sync_sem);
+ memset(cmd_thread, 0, sizeof(mm_camera_cmd_thread_t));
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ rc = mm_camera_cmd_thread_stop(cmd_thread);
+ if (0 == rc) {
+ rc = mm_camera_cmd_thread_destroy(cmd_thread);
+ }
+ return rc;
+}
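+
+/* Usage sketch (illustrative only): feeding the command thread. This mirrors
+ * the enqueue pattern used by mm_camera_cmd_thread_stop() above; the callback,
+ * user data and command type are placeholders.
+ *
+ *   mm_camera_cmd_thread_launch(cmd_thread, my_cb, my_user_data);
+ *
+ *   mm_camera_cmdcb_t *node = malloc(sizeof(mm_camera_cmdcb_t));
+ *   memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ *   node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ *   cam_queue_enq(&cmd_thread->cmd_queue, node);
+ *   cam_sem_post(&cmd_thread->cmd_sem);   // wakes mm_camera_cmd_thread()
+ *
+ *   mm_camera_cmd_thread_release(cmd_thread);
+ */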
diff --git a/camera/QCamera2/stack/mm-camera-test/Android.mk b/camera/QCamera2/stack/mm-camera-test/Android.mk
new file mode 100644
index 0000000..0ea22e0
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/Android.mk
@@ -0,0 +1,193 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH:=$(call my-dir)
+
+# Build command line test app: mm-qcamera-app
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS:= \
+ -DAMSS_VERSION=$(AMSS_VERSION) \
+ $(mmcamera_debug_defines) \
+ $(mmcamera_debug_cflags) \
+ $(USE_SERVER_TREE)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES:= \
+ src/mm_qcamera_main_menu.c \
+ src/mm_qcamera_app.c \
+ src/mm_qcamera_unit_test.c \
+ src/mm_qcamera_video.c \
+ src/mm_qcamera_preview.c \
+ src/mm_qcamera_snapshot.c \
+ src/mm_qcamera_rdi.c \
+ src/mm_qcamera_reprocess.c\
+ src/mm_qcamera_queue.c \
+ src/mm_qcamera_socket.c \
+ src/mm_qcamera_commands.c
+# src/mm_qcamera_dual_test.c \
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+ frameworks/native/include/media/openmax \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../mm-camera-interface/inc \
+ $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+ $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+ifeq ($(TARGET_BOARD_PLATFORM),msm8974)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(filter $(TARGET_BOARD_PLATFORM), apq8084 msm8084),$(TARGET_BOARD_PLATFORM))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8994)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifneq (,$(filter msm8916 msm8952 msm8937 msm8953,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8226)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8610)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else ifneq (,$(filter msm8660,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID # EBI
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+endif
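+# Note: boards not matched above (e.g. msm8996 for this import) fall through
+# to the final else branch and build with CAMERA_ION_FALLBACK_HEAP_ID set to
+# ION_CAMERA_HEAP_ID and NUM_RECORDING_BUFFERS=5.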
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SHARED_LIBRARIES:= \
+ libcutils libdl libmmcamera_interface
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+
+LOCAL_MODULE:= mm-qcamera-app
+
+include $(BUILD_EXECUTABLE)
+
+# Build tuning library
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS:= \
+ -DAMSS_VERSION=$(AMSS_VERSION) \
+ $(mmcamera_debug_defines) \
+ $(mmcamera_debug_cflags) \
+ $(USE_SERVER_TREE)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES:= \
+ src/mm_qcamera_main_menu.c \
+ src/mm_qcamera_app.c \
+ src/mm_qcamera_unit_test.c \
+ src/mm_qcamera_video.c \
+ src/mm_qcamera_preview.c \
+ src/mm_qcamera_snapshot.c \
+ src/mm_qcamera_rdi.c \
+ src/mm_qcamera_reprocess.c\
+ src/mm_qcamera_queue.c \
+ src/mm_qcamera_socket.c \
+ src/mm_qcamera_commands.c
+# src/mm_qcamera_dual_test.c \
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+ frameworks/native/include/media/openmax \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../mm-camera-interface/inc \
+ $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+ $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+ifeq ($(TARGET_BOARD_PLATFORM),msm8974)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(filter $(TARGET_BOARD_PLATFORM), apq8084 msm8084),$(TARGET_BOARD_PLATFORM))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8994)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifneq (,$(filter msm8916 msm8952 msm8937 msm8953,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8226)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8610)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else ifneq (,$(filter msm8660,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID # EBI
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+ LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+endif
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SHARED_LIBRARIES:= \
+ libcutils libdl libmmcamera_interface
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+
+LOCAL_MODULE:= libmm-qcamera
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
new file mode 100644
index 0000000..1b94ea8
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
@@ -0,0 +1,533 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_APP_H__
+#define __MM_QCAMERA_APP_H__
+
+// System dependencies
+#include <pthread.h>
+#include <linux/msm_ion.h>
+#include <linux/msm_mdp.h>
+
+// Camera dependencies
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+#include "mm_qcamera_socket.h"
+
+#define MM_QCAMERA_APP_INTERATION 1
+
+#define MM_APP_MAX_DUMP_FRAME_NUM 1000
+
+#define PREVIEW_BUF_NUM 7
+#define SNAPSHOT_BUF_NUM 10
+#define VIDEO_BUF_NUM 7
+#define ISP_PIX_BUF_NUM 9
+#define STATS_BUF_NUM 4
+#define RDI_BUF_NUM 8
+#define CAPTURE_BUF_NUM 5
+
+#define DEFAULT_PREVIEW_FORMAT CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_PREVIEW_WIDTH 1280
+#define DEFAULT_PREVIEW_HEIGHT 960
+#define DEFAULT_PREVIEW_PADDING CAM_PAD_TO_WORD
+#define DEFAULT_VIDEO_FORMAT CAM_FORMAT_YUV_420_NV12
+#define DEFAULT_VIDEO_WIDTH 800
+#define DEFAULT_VIDEO_HEIGHT 480
+#define DEFAULT_VIDEO_PADDING CAM_PAD_TO_2K
+#define DEFAULT_SNAPSHOT_FORMAT CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_RAW_FORMAT CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
+
+#define DEFAULT_SNAPSHOT_WIDTH 4160
+#define DEFAULT_SNAPSHOT_HEIGHT 3120
+#define DEFAULT_SNAPSHOT_PADDING CAM_PAD_TO_WORD
+
+#define DEFAULT_OV_FORMAT MDP_Y_CRCB_H2V2
+#define DEFAULT_OV_FORMAT_BPP 3/2
+#define DEFAULT_CAMERA_FORMAT_BPP 3/2
+#define FB_PATH "/dev/graphics/fb0"
+#define BACKLIGHT_CONTROL "/sys/class/leds/lcd-backlight/brightness"
+#define BACKLIGHT_LEVEL "205"
+
+#define ENABLE_REPROCESSING 1
+
+#define INVALID_KEY_PRESS 0
+#define BASE_OFFSET ('Z' - 'A' + 1)
+#define BASE_OFFSET_NUM ('Z' - 'A' + 2)
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+ TUNE_CMD_INIT,
+ TUNE_CMD_GET_LIST,
+ TUNE_CMD_GET_PARAMS,
+ TUNE_CMD_SET_PARAMS,
+ TUNE_CMD_MISC,
+ TUNE_CMD_DEINIT,
+} mm_camera_tune_cmd_t;
+
+typedef enum {
+ TUNE_PREVCMD_INIT,
+ TUNE_PREVCMD_SETDIM,
+ TUNE_PREVCMD_GETINFO,
+ TUNE_PREVCMD_GETCHUNKSIZE,
+ TUNE_PREVCMD_GETFRAME,
+ TUNE_PREVCMD_UNSUPPORTED,
+ TUNE_PREVCMD_DEINIT,
+} mm_camera_tune_prevcmd_t;
+
+typedef void (*cam_stream_user_cb) (mm_camera_buf_def_t *frame);
+typedef void (*prev_callback) (mm_camera_buf_def_t *preview_frame);
+
+
+typedef struct {
+ char *send_buf;
+ uint32_t send_len;
+ void *next;
+} eztune_prevcmd_rsp;
+
+typedef struct {
+ int (*command_process) (void *recv, mm_camera_tune_cmd_t cmd,
+ void *param, char *send_buf, uint32_t send_len);
+ int (*prevcommand_process) (void *recv, mm_camera_tune_prevcmd_t cmd,
+ void *param, char **send_buf, uint32_t *send_len);
+ void (*prevframe_callback) (mm_camera_buf_def_t *preview_frame);
+} mm_camera_tune_func_t;
+
+typedef struct {
+ mm_camera_tune_func_t *func_tbl;
+ void *lib_handle;
+}mm_camera_tuning_lib_params_t;
+
+typedef enum {
+ MM_CAMERA_OK,
+ MM_CAMERA_E_GENERAL,
+ MM_CAMERA_E_NO_MEMORY,
+ MM_CAMERA_E_NOT_SUPPORTED,
+ MM_CAMERA_E_INVALID_INPUT,
+ MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+ MM_CAMERA_E_ENCODE,
+ MM_CAMERA_E_BUFFER_REG,
+ MM_CAMERA_E_PMEM_ALLOC,
+ MM_CAMERA_E_CAPTURE_FAILED,
+ MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+} mm_camera_status_type_t;
+
+typedef enum {
+ MM_CHANNEL_TYPE_ZSL, /* preview, and snapshot main */
+ MM_CHANNEL_TYPE_CAPTURE, /* snapshot main, and postview */
+ MM_CHANNEL_TYPE_PREVIEW, /* preview only */
+ MM_CHANNEL_TYPE_SNAPSHOT, /* snapshot main only */
+ MM_CHANNEL_TYPE_VIDEO, /* video only */
+ MM_CHANNEL_TYPE_RDI, /* rdi only */
+ MM_CHANNEL_TYPE_REPROCESS,/* offline reprocess */
+ MM_CHANNEL_TYPE_MAX
+} mm_camera_channel_type_t;
+
+typedef struct {
+ int fd;
+ int main_ion_fd;
+ ion_user_handle_t handle;
+ size_t size;
+ parm_buffer_t *data;
+} mm_camera_app_meminfo_t;
+
+typedef struct {
+ mm_camera_buf_def_t buf;
+ mm_camera_app_meminfo_t mem_info;
+} mm_camera_app_buf_t;
+
+typedef struct {
+ uint32_t s_id;
+ mm_camera_stream_config_t s_config;
+ cam_frame_len_offset_t offset;
+ uint8_t num_of_bufs;
+ uint32_t multipleOf;
+ mm_camera_app_buf_t s_bufs[MM_CAMERA_MAX_NUM_FRAMES];
+ mm_camera_app_buf_t s_info_buf;
+} mm_camera_stream_t;
+
+typedef struct {
+ uint32_t ch_id;
+ uint8_t num_streams;
+ mm_camera_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_channel_t;
+
+typedef void (*release_data_fn)(void* data, void *user_data);
+
+typedef struct {
+ struct cam_list list;
+ void* data;
+} camera_q_node;
+
+typedef struct {
+ camera_q_node m_head;
+ int m_size;
+ pthread_mutex_t m_lock;
+ release_data_fn m_dataFn;
+ void * m_userData;
+} mm_camera_queue_t;
+
+typedef struct {
+ uint16_t user_input_display_width;
+ uint16_t user_input_display_height;
+} USER_INPUT_DISPLAY_T;
+
+typedef struct {
+ mm_camera_vtbl_t *cam;
+ uint8_t num_channels;
+ mm_camera_channel_t channels[MM_CHANNEL_TYPE_MAX];
+ mm_jpeg_ops_t jpeg_ops;
+ uint32_t jpeg_hdl;
+ mm_camera_app_buf_t cap_buf;
+ mm_camera_app_buf_t parm_buf;
+
+ uint32_t current_jpeg_sess_id;
+ mm_camera_super_buf_t* current_job_frames;
+ uint32_t current_job_id;
+ mm_camera_app_buf_t jpeg_buf;
+
+ int fb_fd;
+ struct fb_var_screeninfo vinfo;
+ struct mdp_overlay data_overlay;
+ uint32_t slice_size;
+ uint32_t buffer_width, buffer_height;
+ uint32_t buffer_size;
+ cam_format_t buffer_format;
+ uint32_t frame_size;
+ uint32_t frame_count;
+ int encodeJpeg;
+ int zsl_enabled;
+ int8_t focus_supported;
+ cam_stream_user_cb user_preview_cb;
+ cam_stream_user_cb user_metadata_cb;
+ parm_buffer_t *params_buffer;
+ USER_INPUT_DISPLAY_T preview_resolution;
+
+ //Reprocess params&stream
+ int8_t enable_reproc;
+ int32_t reproc_sharpness;
+ cam_denoise_param_t reproc_wnr;
+ int8_t enable_CAC;
+ mm_camera_queue_t pp_frames;
+ mm_camera_stream_t *reproc_stream;
+ metadata_buffer_t *metadata;
+ int8_t is_chromatix_reload;
+} mm_camera_test_obj_t;
+
+typedef struct {
+ void *ptr;
+ void* ptr_jpeg;
+
+ uint8_t (*get_num_of_cameras) ();
+ int32_t (*mm_camera_open) (uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl);
+ uint32_t (*jpeg_open)(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
+ mm_dimension picture_size,
+ cam_jpeg_metadata_t *jpeg_metadata);
+
+} hal_interface_lib_t;
+
+typedef struct {
+ uint8_t num_cameras;
+ hal_interface_lib_t hal_lib;
+} mm_camera_app_t;
+
+typedef struct {
+ uint32_t width;
+ uint32_t height;
+} mm_camera_lib_snapshot_params;
+
+typedef enum {
+ MM_CAMERA_LIB_NO_ACTION = 0,
+ MM_CAMERA_LIB_RAW_CAPTURE,
+ MM_CAMERA_LIB_JPEG_CAPTURE,
+ MM_CAMERA_LIB_SET_FOCUS_MODE,
+ MM_CAMERA_LIB_DO_AF,
+ MM_CAMERA_LIB_CANCEL_AF,
+ MM_CAMERA_LIB_LOCK_AE,
+ MM_CAMERA_LIB_UNLOCK_AE,
+ MM_CAMERA_LIB_LOCK_AWB,
+ MM_CAMERA_LIB_UNLOCK_AWB,
+ MM_CAMERA_LIB_GET_CHROMATIX,
+ MM_CAMERA_LIB_SET_RELOAD_CHROMATIX,
+ MM_CAMERA_LIB_GET_AFTUNE,
+ MM_CAMERA_LIB_SET_RELOAD_AFTUNE,
+ MM_CAMERA_LIB_SET_AUTOFOCUS_TUNING,
+ MM_CAMERA_LIB_ZSL_ENABLE,
+ MM_CAMERA_LIB_EV,
+ MM_CAMERA_LIB_ANTIBANDING,
+ MM_CAMERA_LIB_SET_VFE_COMMAND,
+ MM_CAMERA_LIB_SET_POSTPROC_COMMAND,
+ MM_CAMERA_LIB_SET_3A_COMMAND,
+ MM_CAMERA_LIB_AEC_ENABLE,
+ MM_CAMERA_LIB_AEC_DISABLE,
+ MM_CAMERA_LIB_AF_ENABLE,
+ MM_CAMERA_LIB_AF_DISABLE,
+ MM_CAMERA_LIB_AWB_ENABLE,
+ MM_CAMERA_LIB_AWB_DISABLE,
+ MM_CAMERA_LIB_AEC_FORCE_LC,
+ MM_CAMERA_LIB_AEC_FORCE_GAIN,
+ MM_CAMERA_LIB_AEC_FORCE_EXP,
+ MM_CAMERA_LIB_AEC_FORCE_SNAP_LC,
+ MM_CAMERA_LIB_AEC_FORCE_SNAP_GAIN,
+ MM_CAMERA_LIB_AEC_FORCE_SNAP_EXP,
+ MM_CAMERA_LIB_WB,
+ MM_CAMERA_LIB_EXPOSURE_METERING,
+ MM_CAMERA_LIB_BRIGHTNESS,
+ MM_CAMERA_LIB_CONTRAST,
+ MM_CAMERA_LIB_SATURATION,
+ MM_CAMERA_LIB_SHARPNESS,
+ MM_CAMERA_LIB_ISO,
+ MM_CAMERA_LIB_ZOOM,
+ MM_CAMERA_LIB_BESTSHOT,
+ MM_CAMERA_LIB_FLASH,
+ MM_CAMERA_LIB_FPS_RANGE,
+ MM_CAMERA_LIB_WNR_ENABLE,
+ MM_CAMERA_LIB_SET_TINTLESS,
+} mm_camera_lib_commands;
+
+typedef struct {
+ int32_t stream_width, stream_height;
+ cam_focus_mode_type af_mode;
+} mm_camera_lib_params;
+
+typedef struct {
+ tuneserver_protocol_t *proto;
+ int clientsocket_id;
+ prserver_protocol_t *pr_proto;
+ int pr_clientsocket_id;
+ mm_camera_tuning_lib_params_t tuning_params;
+} tuningserver_t;
+
+typedef struct {
+ mm_camera_app_t app_ctx;
+ mm_camera_test_obj_t test_obj;
+ mm_camera_lib_params current_params;
+ int stream_running;
+ tuningserver_t tsctrl;
+} mm_camera_lib_ctx;
+
+typedef mm_camera_lib_ctx mm_camera_lib_handle;
+
+typedef int (*mm_app_test_t) (mm_camera_app_t *cam_apps);
+typedef struct {
+ mm_app_test_t f;
+ int r;
+} mm_app_tc_t;
+
+extern int mm_app_unit_test_entry(mm_camera_app_t *cam_app);
+extern int mm_app_dual_test_entry(mm_camera_app_t *cam_app);
+extern int setmetainfoCommand(mm_camera_test_obj_t *test_obj,
+ cam_stream_size_info_t *value);
+
+extern void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+ char *name,
+ char *ext,
+ uint32_t frame_idx);
+extern void mm_app_dump_jpeg_frame(const void * data,
+ size_t size,
+ char* name,
+ char* ext,
+ uint32_t index);
+extern int mm_camera_app_timedwait(uint8_t seconds);
+extern int mm_camera_app_wait();
+extern void mm_camera_app_done();
+extern int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+ cam_frame_len_offset_t *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t is_streambuf,
+ size_t multipleOf);
+extern int mm_app_release_bufs(uint8_t num_bufs,
+ mm_camera_app_buf_t* app_bufs);
+extern int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+extern int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data);
+extern int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info, int cmd);
+extern int32_t mm_app_stream_clean_invalidate_buf(uint32_t index, void *user_data);
+extern int32_t mm_app_stream_invalidate_buf(uint32_t index, void *user_data);
+extern int mm_app_open(mm_camera_app_t *cam_app,
+ int cam_id,
+ mm_camera_test_obj_t *test_obj);
+extern int mm_app_close(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_channel(
+ mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata);
+extern int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel);
+extern mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel);
+extern int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_stream_t *stream);
+extern int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_stream_t *stream,
+ mm_camera_stream_config_t *config);
+extern int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel);
+extern int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel);
+extern mm_camera_channel_t *mm_app_get_channel_by_type(
+ mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_type_t ch_type);
+
+extern int mm_app_start_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_preview_channel(
+ mm_camera_test_obj_t *test_obj);
+extern mm_camera_stream_t * mm_app_add_raw_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst);
+extern int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel);
+extern mm_camera_channel_t * mm_app_add_snapshot_channel(
+ mm_camera_test_obj_t *test_obj);
+extern mm_camera_stream_t * mm_app_add_snapshot_stream(
+ mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst);
+extern mm_camera_stream_t * mm_app_add_metadata_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs);
+extern int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+ uint8_t num_snapshots);
+extern int mm_app_stop_capture(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_capture_raw(mm_camera_test_obj_t *test_obj,
+ uint8_t num_snapshots);
+extern int mm_app_stop_capture_raw(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst);
+extern int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj);
+extern int mm_app_initialize_fb(mm_camera_test_obj_t *test_obj);
+extern int mm_app_close_fb(mm_camera_test_obj_t *test_obj);
+extern int mm_app_fb_write(mm_camera_test_obj_t *test_obj, char *buffer);
+extern int mm_app_overlay_display(mm_camera_test_obj_t *test_obj, int bufferFd);
+extern int mm_app_allocate_ion_memory(mm_camera_app_buf_t *buf, unsigned int ion_type);
+extern int mm_app_deallocate_ion_memory(mm_camera_app_buf_t *buf);
+extern int mm_app_set_params(mm_camera_test_obj_t *test_obj,
+ cam_intf_parm_type_t param_type,
+ int32_t value);
+extern int mm_app_set_preview_fps_range(mm_camera_test_obj_t *test_obj,
+ cam_fps_range_t *fpsRange);
+extern int mm_app_set_face_detection(mm_camera_test_obj_t *test_obj,
+ cam_fd_set_parm_t *fd_set_parm);
+extern int mm_app_set_metadata_usercb(mm_camera_test_obj_t *test_obj,
+ cam_stream_user_cb usercb);
+extern int mm_app_set_flash_mode(mm_camera_test_obj_t *test_obj,
+ cam_flash_mode_t flashMode);
+
+/* JIG camera lib interface */
+
+int mm_camera_lib_open(mm_camera_lib_handle *handle, int cam_id);
+int mm_camera_lib_get_caps(mm_camera_lib_handle *handle,
+ cam_capability_t *caps);
+int mm_camera_lib_start_stream(mm_camera_lib_handle *handle);
+int mm_camera_lib_send_command(mm_camera_lib_handle *handle,
+ mm_camera_lib_commands cmd,
+ void *data, void *out_data);
+int mm_camera_lib_stop_stream(mm_camera_lib_handle *handle);
+int mm_camera_lib_number_of_cameras(mm_camera_lib_handle *handle);
+int mm_camera_lib_close(mm_camera_lib_handle *handle);
+int32_t mm_camera_load_tuninglibrary(
+ mm_camera_tuning_lib_params_t *tuning_param);
+int mm_camera_lib_set_preview_usercb(
+ mm_camera_lib_handle *handle, cam_stream_user_cb cb);
+//
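+
+/* Usage sketch (illustrative only; error checking omitted, the capture
+ * command and its NULL payloads are placeholders):
+ *
+ *   mm_camera_lib_handle handle;
+ *   cam_capability_t caps;
+ *
+ *   mm_camera_lib_open(&handle, 0);
+ *   mm_camera_lib_get_caps(&handle, &caps);
+ *   mm_camera_lib_start_stream(&handle);
+ *   mm_camera_lib_send_command(&handle, MM_CAMERA_LIB_JPEG_CAPTURE, NULL, NULL);
+ *   mm_camera_lib_stop_stream(&handle);
+ *   mm_camera_lib_close(&handle);
+ */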
+
+int mm_app_start_regression_test(int run_tc);
+int mm_app_load_hal(mm_camera_app_t *my_cam_app);
+
+extern int createEncodingSession(mm_camera_test_obj_t *test_obj,
+ mm_camera_stream_t *m_stream,
+ mm_camera_buf_def_t *m_frame);
+extern int encodeData(mm_camera_test_obj_t *test_obj, mm_camera_super_buf_t* recvd_frame,
+ mm_camera_stream_t *m_stream);
+extern int mm_app_take_picture(mm_camera_test_obj_t *test_obj, uint8_t);
+
+extern mm_camera_channel_t * mm_app_add_reprocess_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_stream_t *source_stream);
+extern int mm_app_start_reprocess(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_reprocess(mm_camera_test_obj_t *test_obj);
+extern int mm_app_do_reprocess(mm_camera_test_obj_t *test_obj,
+ mm_camera_buf_def_t *frame,
+ uint32_t meta_idx,
+ mm_camera_super_buf_t *super_buf,
+ mm_camera_stream_t *src_meta);
+extern void mm_app_release_ppinput(void *data, void *user_data);
+
+extern int mm_camera_queue_init(mm_camera_queue_t *queue,
+ release_data_fn data_rel_fn,
+ void *user_data);
+extern int mm_qcamera_queue_release(mm_camera_queue_t *queue);
+extern int mm_qcamera_queue_isempty(mm_camera_queue_t *queue);
+extern int mm_qcamera_queue_enqueue(mm_camera_queue_t *queue, void *data);
+extern void* mm_qcamera_queue_dequeue(mm_camera_queue_t *queue,
+ int bFromHead);
+extern void mm_qcamera_queue_flush(mm_camera_queue_t *queue);
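+
+/* Usage sketch (illustrative only; my_release_fn, my_user_data and frame_ptr
+ * are placeholders):
+ *
+ *   mm_camera_queue_t q;
+ *   mm_camera_queue_init(&q, my_release_fn, my_user_data);
+ *   mm_qcamera_queue_enqueue(&q, frame_ptr);
+ *   void *item = mm_qcamera_queue_dequeue(&q, 1);  // bFromHead = 1
+ *   mm_qcamera_queue_flush(&q);
+ *   mm_qcamera_queue_release(&q);
+ */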
+
+#endif /* __MM_QCAMERA_APP_H__ */
+
+
+
+
+
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h
new file mode 100644
index 0000000..1a7ac33
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h
@@ -0,0 +1,68 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_COMMANDS_H__
+#define __MM_QCAMERA_COMMANDS_H__
+
+// Camera dependencies
+#include "mm_qcamera_socket.h"
+#include "mm_qcamera_app.h"
+
+int tuneserver_close_cam(mm_camera_lib_handle *lib_handle);
+int tuneserver_stop_cam(mm_camera_lib_handle *lib_handle);
+int tuneserver_open_cam(mm_camera_lib_handle *lib_handle);
+
+int tuneserver_initialize_tuningp(void * ctrl, int client_socket_id,
+ char *send_buf, uint32_t send_len);
+int tuneserver_deinitialize_tuningp(void * ctrl, int client_socket_id,
+ char *send_buf, uint32_t send_len);
+int tuneserver_process_get_list_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len);
+int tuneserver_process_misc_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len);
+int tuneserver_process_get_params_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len);
+int tuneserver_process_set_params_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len);
+
+int tuneserver_initialize_prevtuningp(void * ctrl,
+ int pr_client_socket_id, cam_dimension_t dimension,
+ char **send_buf, uint32_t *send_len);
+int tuneserver_deinitialize_prevtuningp(void * ctrl,
+ char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getinfo(void * ctrl,
+ char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getchunksize(void * ctrl,
+ char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getframe(void * ctrl,
+ char **send_buf, uint32_t *send_len);
+int tuneserver_preview_unsupported(void * ctrl,
+ char **send_buf, uint32_t *send_len);
+
+#endif /*__MM_QCAMERA_COMMANDS_H__*/
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
new file mode 100644
index 0000000..acd197e
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
@@ -0,0 +1,38 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_DBG_H__
+#define __MM_QCAMERA_DBG_H__
+
+#ifdef QCAMERA_REDEFINE_LOG
+#define CAM_MODULE CAM_HAL_MODULE
+#include "mm_camera_dbg.h"
+#endif
+
+#endif /* __MM_QCAMERA_DBG_H__ */
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
new file mode 100644
index 0000000..1b9eb4e
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
@@ -0,0 +1,439 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_MAIN_MENU_H__
+#define __MM_QCAMERA_MAIN_MENU_H__
+
+// Camera dependencies
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+
+#define VIDEO_BUFFER_SIZE (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2)
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define SNAPSHOT_BUFFER_SIZE (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+#define PREVIEW_FRAMES_NUM 5
+#define VIDEO_FRAMES_NUM 5
+#define THUMBNAIL_FRAMES_NUM 1
+#define SNAPSHOT_FRAMES_NUM 1
+#define MAX_NUM_FORMAT 32
+
+typedef enum
+{
+ START_PREVIEW,
+ STOP_PREVIEW,
+ SET_WHITE_BALANCE,
+ SET_TINTLESS_ENABLE,
+ SET_TINTLESS_DISABLE,
+ SET_EXP_METERING,
+ GET_CTRL_VALUE,
+ TOGGLE_AFR,
+ SET_ISO,
+ BRIGHTNESS_GOTO_SUBMENU,
+ CONTRAST_GOTO_SUBMENU,
+ EV_GOTO_SUBMENU,
+ SATURATION_GOTO_SUBMENU,
+ SET_ZOOM,
+ SET_SHARPNESS,
+ TAKE_JPEG_SNAPSHOT,
+ START_RECORDING,
+ STOP_RECORDING,
+ BEST_SHOT,
+ LIVE_SHOT,
+ FLASH_MODES,
+ TOGGLE_ZSL,
+ TAKE_RAW_SNAPSHOT,
+ SWITCH_SNAP_RESOLUTION,
+ TOGGLE_WNR,
+ EXIT
+} Camera_main_menu_t;
+
+typedef enum
+{
+ ACTION_NO_ACTION,
+ ACTION_START_PREVIEW,
+ ACTION_STOP_PREVIEW,
+ ACTION_SET_WHITE_BALANCE,
+ ACTION_SET_TINTLESS_ENABLE,
+ ACTION_SET_TINTLESS_DISABLE,
+ ACTION_SET_EXP_METERING,
+ ACTION_GET_CTRL_VALUE,
+ ACTION_TOGGLE_AFR,
+ ACTION_SET_ISO,
+ ACTION_BRIGHTNESS_INCREASE,
+ ACTION_BRIGHTNESS_DECREASE,
+ ACTION_CONTRAST_INCREASE,
+ ACTION_CONTRAST_DECREASE,
+ ACTION_EV_INCREASE,
+ ACTION_EV_DECREASE,
+ ACTION_SATURATION_INCREASE,
+ ACTION_SATURATION_DECREASE,
+ ACTION_SET_ZOOM,
+ ACTION_SHARPNESS_INCREASE,
+ ACTION_SHARPNESS_DECREASE,
+ ACTION_TAKE_JPEG_SNAPSHOT,
+ ACTION_START_RECORDING,
+ ACTION_STOP_RECORDING,
+ ACTION_SET_BESTSHOT_MODE,
+ ACTION_TAKE_LIVE_SNAPSHOT,
+ ACTION_SET_FLASH_MODE,
+ ACTION_SWITCH_CAMERA,
+ ACTION_TOGGLE_ZSL,
+ ACTION_TAKE_RAW_SNAPSHOT,
+ ACTION_SWITCH_RESOLUTION,
+ ACTION_TOGGLE_WNR,
+ ACTION_EXIT
+} camera_action_t;
+
+#define INVALID_KEY_PRESS 0
+#define BASE_OFFSET ('Z' - 'A' + 1)
+#define BASE_OFFSET_NUM ('Z' - 'A' + 2)
+#define PAD_TO_WORD(a) (((a)+3)&~3)
+
+
+#define SQCIF_WIDTH 128
+#define SQCIF_HEIGHT 96
+#define QCIF_WIDTH 176
+#define QCIF_HEIGHT 144
+#define QVGA_WIDTH 320
+#define QVGA_HEIGHT 240
+#define HD_THUMBNAIL_WIDTH 256
+#define HD_THUMBNAIL_HEIGHT 144
+#define CIF_WIDTH 352
+#define CIF_HEIGHT 288
+#define VGA_WIDTH 640
+#define VGA_HEIGHT 480
+#define WVGA_WIDTH 800
+#define WVGA_HEIGHT 480
+#define WVGA_PLUS_WIDTH 960
+#define WVGA_PLUS_HEIGHT 720
+
+#define MP1_WIDTH 1280
+#define MP1_HEIGHT 960
+#define MP2_WIDTH 1600
+#define MP2_HEIGHT 1200
+#define MP3_WIDTH 2048
+#define MP3_HEIGHT 1536
+#define MP5_WIDTH 2592
+#define MP5_HEIGHT 1944
+#define MP8_WIDTH 3264
+#define MP8_HEIGHT 2448
+#define MP12_WIDTH 4000
+#define MP12_HEIGHT 3000
+
+#define SVGA_WIDTH 800
+#define SVGA_HEIGHT 600
+#define XGA_WIDTH 1024
+#define XGA_HEIGHT 768
+#define HD720_WIDTH 1280
+#define HD720_HEIGHT 720
+#define HD720_PLUS_WIDTH 1440
+#define HD720_PLUS_HEIGHT 1080
+#define WXGA_WIDTH 1280
+#define WXGA_HEIGHT 768
+#define HD1080_WIDTH 1920
+#define HD1080_HEIGHT 1080
+
+
+#define ONEMP_WIDTH 1280
+#define SXGA_WIDTH 1280
+#define UXGA_WIDTH 1600
+#define QXGA_WIDTH 2048
+#define FIVEMP_WIDTH 2560
+
+
+#define ONEMP_HEIGHT 960
+#define SXGA_HEIGHT 1024
+#define UXGA_HEIGHT 1200
+#define QXGA_HEIGHT 1536
+#define FIVEMP_HEIGHT 1920
+
+
+typedef enum
+{
+ RESOLUTION_MIN,
+ QCIF = RESOLUTION_MIN,
+ QVGA,
+ VGA,
+ WVGA,
+  WVGA_PLUS,
+ HD720,
+ HD720_PLUS,
+ HD1080,
+ RESOLUTION_PREVIEW_VIDEO_MAX = HD1080,
+ WXGA,
+ MP1,
+ MP2,
+ MP3,
+ MP5,
+ MP8,
+ MP12,
+ RESOLUTION_MAX = MP12,
+} Camera_Resolution;
+
+typedef struct{
+ uint16_t width;
+ uint16_t height;
+ char * name;
+ char * str_name;
+ int supported;
+} DIMENSION_TBL_T;
+
+typedef enum {
+ WHITE_BALANCE_STATE,
+ WHITE_BALANCE_TEMPERATURE,
+ BRIGHTNESS_CTRL,
+ EV,
+ CONTRAST_CTRL,
+ SATURATION_CTRL,
+ SHARPNESS_CTRL
+} Get_Ctrl_modes;
+
+typedef enum {
+ AUTO_EXP_FRAME_AVG,
+ AUTO_EXP_CENTER_WEIGHTED,
+ AUTO_EXP_SPOT_METERING,
+ AUTO_EXP_SMART_METERING,
+ AUTO_EXP_USER_METERING,
+ AUTO_EXP_SPOT_METERING_ADV,
+ AUTO_EXP_CENTER_WEIGHTED_ADV,
+ AUTO_EXP_MAX
+} Exp_Metering_modes;
+
+typedef enum {
+ ISO_AUTO,
+ ISO_DEBLUR,
+ ISO_100,
+ ISO_200,
+ ISO_400,
+ ISO_800,
+ ISO_1600,
+ ISO_MAX
+} ISO_modes;
+
+typedef enum {
+ BESTSHOT_AUTO,
+ BESTSHOT_ACTION,
+ BESTSHOT_PORTRAIT,
+ BESTSHOT_LANDSCAPE,
+ BESTSHOT_NIGHT,
+ BESTSHOT_NIGHT_PORTRAIT,
+ BESTSHOT_THEATRE,
+ BESTSHOT_BEACH,
+ BESTSHOT_SNOW,
+ BESTSHOT_SUNSET,
+ BESTSHOT_ANTISHAKE,
+ BESTSHOT_FIREWORKS,
+ BESTSHOT_SPORTS,
+ BESTSHOT_PARTY,
+ BESTSHOT_CANDLELIGHT,
+ BESTSHOT_ASD,
+ BESTSHOT_BACKLIGHT,
+ BESTSHOT_FLOWERS,
+ BESTSHOT_AR,
+ BESTSHOT_HDR,
+ BESTSHOT_MAX
+} Bestshot_modes;
+
+typedef enum {
+ FLASH_MODE_OFF,
+ FLASH_MODE_AUTO,
+ FLASH_MODE_ON,
+ FLASH_MODE_TORCH,
+ FLASH_MODE_MAX,
+} Flash_modes;
+
+typedef enum {
+ WB_AUTO,
+ WB_INCANDESCENT,
+ WB_FLUORESCENT,
+ WB_WARM_FLUORESCENT,
+ WB_DAYLIGHT,
+ WB_CLOUDY_DAYLIGHT,
+ WB_TWILIGHT,
+ WB_SHADE,
+ WB_MAX
+} White_Balance_modes;
+
+typedef enum
+{
+ MENU_ID_MAIN,
+ MENU_ID_WHITEBALANCECHANGE,
+ MENU_ID_EXPMETERINGCHANGE,
+ MENU_ID_GET_CTRL_VALUE,
+ MENU_ID_TOGGLEAFR,
+ MENU_ID_ISOCHANGE,
+ MENU_ID_BRIGHTNESSCHANGE,
+ MENU_ID_CONTRASTCHANGE,
+ MENU_ID_EVCHANGE,
+ MENU_ID_SATURATIONCHANGE,
+ MENU_ID_ZOOMCHANGE,
+ MENU_ID_SHARPNESSCHANGE,
+ MENU_ID_BESTSHOT,
+ MENU_ID_FLASHMODE,
+ MENU_ID_SENSORS,
+ MENU_ID_SWITCH_RES,
+ MENU_ID_INVALID,
+} menu_id_change_t;
+
+typedef enum
+{
+ DECREASE_ZOOM,
+ INCREASE_ZOOM,
+ INCREASE_STEP_ZOOM,
+ DECREASE_STEP_ZOOM,
+} Camera_Zoom;
+
+typedef enum
+{
+ INC_CONTRAST,
+ DEC_CONTRAST,
+} Camera_Contrast_changes;
+
+typedef enum
+{
+ INC_BRIGHTNESS,
+ DEC_BRIGHTNESS,
+} Camera_Brightness_changes;
+
+typedef enum
+{
+ INCREASE_EV,
+ DECREASE_EV,
+} Camera_EV_changes;
+
+typedef enum {
+ INC_SATURATION,
+ DEC_SATURATION,
+} Camera_Saturation_changes;
+
+typedef enum
+{
+ INC_ISO,
+ DEC_ISO,
+} Camera_ISO_changes;
+
+typedef enum
+{
+ INC_SHARPNESS,
+ DEC_SHARPNESS,
+} Camera_Sharpness_changes;
+
+typedef enum {
+ ZOOM_IN,
+ ZOOM_OUT,
+} Zoom_direction;
+
+typedef struct{
+ Camera_main_menu_t main_menu;
+ char * menu_name;
+} CAMERA_MAIN_MENU_TBL_T;
+
+typedef struct{
+ char * menu_name;
+ int present;
+} CAMERA_SENSOR_MENU_TLB_T;
+
+typedef struct{
+ Camera_Resolution cs_id;
+ uint16_t width;
+ uint16_t height;
+ char * name;
+ char * str_name;
+} PREVIEW_DIMENSION_TBL_T;
+
+typedef struct {
+ White_Balance_modes wb_id;
+ char * wb_name;
+} WHITE_BALANCE_TBL_T;
+
+typedef struct {
+ Get_Ctrl_modes get_ctrl_id;
+ char * get_ctrl_name;
+} GET_CTRL_TBL_T;
+
+typedef struct{
+ Exp_Metering_modes exp_metering_id;
+ char * exp_metering_name;
+} EXP_METERING_TBL_T;
+
+typedef struct {
+ Bestshot_modes bs_id;
+ char *name;
+} BESTSHOT_MODE_TBT_T;
+
+typedef struct {
+ Flash_modes bs_id;
+ char *name;
+} FLASH_MODE_TBL_T;
+
+typedef struct {
+ ISO_modes iso_modes;
+ char *iso_modes_name;
+} ISO_TBL_T;
+
+typedef struct {
+ Zoom_direction zoom_direction;
+ char * zoom_direction_name;
+} ZOOM_TBL_T;
+
+typedef struct {
+ Camera_Sharpness_changes sharpness_change;
+ char *sharpness_change_name;
+} SHARPNESS_TBL_T;
+
+typedef struct {
+ Camera_Brightness_changes bc_id;
+ char * brightness_name;
+} CAMERA_BRIGHTNESS_TBL_T;
+
+typedef struct {
+ Camera_Contrast_changes cc_id;
+ char * contrast_name;
+} CAMERA_CONTRST_TBL_T;
+
+typedef struct {
+ Camera_EV_changes ec_id;
+ char * EV_name;
+} CAMERA_EV_TBL_T;
+
+typedef struct {
+ Camera_Saturation_changes sc_id;
+ char * saturation_name;
+} CAMERA_SATURATION_TBL_T;
+
+typedef struct {
+ Camera_Sharpness_changes bc_id;
+ char * sharpness_name;
+} CAMERA_SHARPNESS_TBL_T;
+
+#endif /* __MM_QCAMERA_MAIN_MENU_H__ */
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h
new file mode 100644
index 0000000..186c109
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h
@@ -0,0 +1,113 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_SOCKET_H__
+#define __MM_QCAMERA_SOCKET_H__
+
+// System dependencies
+#include <arpa/inet.h>
+
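+/* Standalone replacements for the __FD_* helpers: each macro indexes
+ * fds_bits[fd >> 5] and sets, clears, tests or zeroes bit (fd & 31),
+ * presumably to avoid relying on the libc-provided inline versions. */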
+#undef __FD_SET
+#define __FD_SET(fd, fdsetp) \
+ (((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] |= (1LU<<((fd) & 31)))
+
+#undef __FD_CLR
+#define __FD_CLR(fd, fdsetp) \
+ (((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] &= ~(1LU<<((fd) & 31)))
+
+#undef __FD_ISSET
+#define __FD_ISSET(fd, fdsetp) \
+ ((((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] & (1LU<<((fd) & 31))) != 0)
+
+#undef __FD_ZERO
+#define __FD_ZERO(fdsetp) \
+ (memset (fdsetp, 0, sizeof (*(fd_set *)(fdsetp))))
+
+#define TUNESERVER_MAX_RECV 2048
+#define TUNESERVER_MAX(a, b) (((a) > (b)) ? (a) : (b))
+
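+/* Command identifiers for the eztune host protocol: the TUNESERVER_* codes
+ * select parameter list/get/set/misc operations and the TUNE_PREV_* codes
+ * drive the preview/snapshot data path. */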
+#define TUNESERVER_GET_LIST 1014
+#define TUNESERVER_GET_PARMS 1015
+#define TUNESERVER_SET_PARMS 1016
+#define TUNESERVER_MISC_CMDS 1021
+
+#define TUNE_PREV_GET_INFO 0x0001
+#define TUNE_PREV_CH_CNK_SIZE 0x0002
+#define TUNE_PREV_GET_PREV_FRAME 0x0003
+#define TUNE_PREV_GET_JPG_SNAP 0x0004
+#define TUNE_PREV_GET_RAW_SNAP 0x0005
+#define TUNE_PREV_GET_RAW_PREV 0x0006
+
+typedef struct {
+ char data[128];
+} tuneserver_misc_cmd;
+
+typedef enum {
+ TUNESERVER_RECV_COMMAND = 1,
+ TUNESERVER_RECV_PAYLOAD_SIZE,
+ TUNESERVER_RECV_PAYLOAD,
+ TUNESERVER_RECV_RESPONSE,
+ TUNESERVERER_RECV_INVALID,
+} tuneserver_recv_cmd_t;
+
+typedef struct {
+ uint16_t current_cmd;
+ tuneserver_recv_cmd_t next_recv_code;
+ uint32_t next_recv_len;
+ void *recv_buf;
+ uint32_t recv_len;
+ uint32_t send_len;
+ void *send_buf;
+} tuneserver_protocol_t;
+
+typedef enum {
+ TUNE_PREV_RECV_COMMAND = 1,
+ TUNE_PREV_RECV_NEWCNKSIZE,
+ TUNE_PREV_RECV_INVALID
+} tune_prev_cmd_t;
+
+typedef struct _eztune_preview_protocol_t {
+ uint16_t current_cmd;
+ tune_prev_cmd_t next_recv_code;
+ uint32_t next_recv_len;
+ int32_t send_len;
+ char* send_buf;
+ uint32_t send_buf_size;
+ uint32_t new_cnk_size;
+ uint32_t new_cmd_available;
+} prserver_protocol_t;
+
+typedef union {
+ struct sockaddr addr;
+ struct sockaddr_in addr_in;
+} mm_qcamera_sock_addr_t;
+
+int eztune_server_start(void *lib_handle);
+
+#endif /*__MM_QCAMERA_SOCKET_H__*/
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
new file mode 100644
index 0000000..5735ed6
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
@@ -0,0 +1,2404 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <dlfcn.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <linux/msm_ion.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+
+// Camera dependencies
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+static pthread_mutex_t app_mutex;
+static int thread_status = 0;
+static pthread_cond_t app_cond_v;
+
+#define MM_QCAMERA_APP_NANOSEC_SCALE 1000000000
+
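+/* Completion handshake used by the test app: another thread calls
+ * mm_camera_app_done() to set thread_status and signal app_cond_v, while
+ * mm_camera_app_wait()/mm_camera_app_timedwait() block on the condition
+ * variable and then reset thread_status for the next round. */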
+int mm_camera_app_timedwait(uint8_t seconds)
+{
+ int rc = 0;
+ pthread_mutex_lock(&app_mutex);
+ if(FALSE == thread_status) {
+ struct timespec tw;
+ memset(&tw, 0, sizeof tw);
+        /* pthread_cond_timedwait() takes an absolute CLOCK_REALTIME time;
+         * the deadline is "now + seconds", with a zero nanosecond part. */
+        tw.tv_sec = time(NULL) + seconds;
+        tw.tv_nsec = 0;
+
+ rc = pthread_cond_timedwait(&app_cond_v, &app_mutex,&tw);
+ thread_status = FALSE;
+ }
+ pthread_mutex_unlock(&app_mutex);
+ return rc;
+}
+
+int mm_camera_app_wait()
+{
+ int rc = 0;
+ pthread_mutex_lock(&app_mutex);
+ if(FALSE == thread_status){
+ pthread_cond_wait(&app_cond_v, &app_mutex);
+ }
+ thread_status = FALSE;
+ pthread_mutex_unlock(&app_mutex);
+ return rc;
+}
+
+void mm_camera_app_done()
+{
+ pthread_mutex_lock(&app_mutex);
+ thread_status = TRUE;
+ pthread_cond_signal(&app_cond_v);
+ pthread_mutex_unlock(&app_mutex);
+}
+
+int mm_app_load_hal(mm_camera_app_t *my_cam_app)
+{
+ memset(&my_cam_app->hal_lib, 0, sizeof(hal_interface_lib_t));
+ my_cam_app->hal_lib.ptr = dlopen("libmmcamera_interface.so", RTLD_NOW);
+ my_cam_app->hal_lib.ptr_jpeg = dlopen("libmmjpeg_interface.so", RTLD_NOW);
+ if (!my_cam_app->hal_lib.ptr || !my_cam_app->hal_lib.ptr_jpeg) {
+ LOGE("Error opening HAL library %s\n", dlerror());
+ return -MM_CAMERA_E_GENERAL;
+ }
+ *(void **)&(my_cam_app->hal_lib.get_num_of_cameras) =
+ dlsym(my_cam_app->hal_lib.ptr, "get_num_of_cameras");
+ *(void **)&(my_cam_app->hal_lib.mm_camera_open) =
+ dlsym(my_cam_app->hal_lib.ptr, "camera_open");
+ *(void **)&(my_cam_app->hal_lib.jpeg_open) =
+ dlsym(my_cam_app->hal_lib.ptr_jpeg, "jpeg_open");
+
+ if (my_cam_app->hal_lib.get_num_of_cameras == NULL ||
+ my_cam_app->hal_lib.mm_camera_open == NULL ||
+ my_cam_app->hal_lib.jpeg_open == NULL) {
+ LOGE("Error loading HAL sym %s\n", dlerror());
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ my_cam_app->num_cameras = my_cam_app->hal_lib.get_num_of_cameras();
+ LOGD("num_cameras = %d\n", my_cam_app->num_cameras);
+
+ return MM_CAMERA_OK;
+}
+
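+/* Allocates a single buffer through the legacy ION interface: open /dev/ion,
+ * ION_IOC_ALLOC a cached, 4K-aligned allocation, ION_IOC_SHARE it to obtain
+ * a shareable fd, mmap that fd, and record fd/handle/size/vaddr in
+ * buf->mem_info. The ion_type argument is unused on this target. */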
+int mm_app_allocate_ion_memory(mm_camera_app_buf_t *buf,
+ __unused unsigned int ion_type)
+{
+ int rc = MM_CAMERA_OK;
+ struct ion_handle_data handle_data;
+ struct ion_allocation_data alloc;
+ struct ion_fd_data ion_info_fd;
+ int main_ion_fd = -1;
+ void *data = NULL;
+
+ main_ion_fd = open("/dev/ion", O_RDONLY);
+ if (main_ion_fd <= 0) {
+ LOGE("Ion dev open failed %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+
+ memset(&alloc, 0, sizeof(alloc));
+ alloc.len = buf->mem_info.size;
+ /* to make it page size aligned */
+ alloc.len = (alloc.len + 4095U) & (~4095U);
+ alloc.align = 4096;
+ alloc.flags = ION_FLAG_CACHED;
+ alloc.heap_id_mask = ION_HEAP(ION_SYSTEM_HEAP_ID);
+ rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+ if (rc < 0) {
+ LOGE("ION allocation failed %s with rc = %d \n",strerror(errno), rc);
+ goto ION_ALLOC_FAILED;
+ }
+
+ memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+ ion_info_fd.handle = alloc.handle;
+ rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+ if (rc < 0) {
+ LOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+
+ data = mmap(NULL,
+ alloc.len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ ion_info_fd.fd,
+ 0);
+
+ if (data == MAP_FAILED) {
+ LOGE("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+ buf->mem_info.main_ion_fd = main_ion_fd;
+ buf->mem_info.fd = ion_info_fd.fd;
+ buf->mem_info.handle = ion_info_fd.handle;
+ buf->mem_info.size = alloc.len;
+ buf->mem_info.data = data;
+ return MM_CAMERA_OK;
+
+ION_MAP_FAILED:
+ memset(&handle_data, 0, sizeof(handle_data));
+ handle_data.handle = ion_info_fd.handle;
+ ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(main_ion_fd);
+ION_OPEN_FAILED:
+ return -MM_CAMERA_E_GENERAL;
+}
+
+int mm_app_deallocate_ion_memory(mm_camera_app_buf_t *buf)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ rc = munmap(buf->mem_info.data, buf->mem_info.size);
+
+ if (buf->mem_info.fd >= 0) {
+ close(buf->mem_info.fd);
+ buf->mem_info.fd = -1;
+ }
+
+ if (buf->mem_info.main_ion_fd >= 0) {
+ memset(&handle_data, 0, sizeof(handle_data));
+ handle_data.handle = buf->mem_info.handle;
+ ioctl(buf->mem_info.main_ion_fd, ION_IOC_FREE, &handle_data);
+ close(buf->mem_info.main_ion_fd);
+ buf->mem_info.main_ion_fd = -1;
+ }
+ return rc;
+}
+
+/* cmd = ION_IOC_CLEAN_CACHES, ION_IOC_INV_CACHES, ION_IOC_CLEAN_INV_CACHES */
+int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info,
+ int cmd)
+{
+ struct ion_flush_data cache_inv_data;
+ struct ion_custom_data custom_data;
+ int ret = MM_CAMERA_OK;
+
+#ifdef USE_ION
+ if (NULL == mem_info) {
+ LOGE("mem_info is NULL, return here");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+ memset(&custom_data, 0, sizeof(custom_data));
+ cache_inv_data.vaddr = mem_info->data;
+ cache_inv_data.fd = mem_info->fd;
+ cache_inv_data.handle = mem_info->handle;
+ cache_inv_data.length = (unsigned int)mem_info->size;
+ custom_data.cmd = (unsigned int)cmd;
+ custom_data.arg = (unsigned long)&cache_inv_data;
+
+ LOGD("addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+ cache_inv_data.vaddr, cache_inv_data.fd,
+ (unsigned long)cache_inv_data.handle, cache_inv_data.length,
+ mem_info->main_ion_fd);
+ if(mem_info->main_ion_fd >= 0) {
+ if(ioctl(mem_info->main_ion_fd, ION_IOC_CUSTOM, &custom_data) < 0) {
+ LOGE("Cache Invalidate failed\n");
+ ret = -MM_CAMERA_E_GENERAL;
+ }
+ }
+#endif
+
+ return ret;
+}
+
+void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+ char *name,
+ char *ext,
+ uint32_t frame_idx)
+{
+ char file_name[FILENAME_MAX];
+ int file_fd;
+ int i;
+ int offset = 0;
+ if ( frame != NULL) {
+ snprintf(file_name, sizeof(file_name),
+ QCAMERA_DUMP_FRM_LOCATION"%s_%04d.%s", name, frame_idx, ext);
+ file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+ if (file_fd < 0) {
+ LOGE("cannot open file %s \n", file_name);
+ } else {
+ for (i = 0; i < frame->planes_buf.num_planes; i++) {
+ LOGD("saving file from address: %p, data offset: %d, "
+ "length: %d \n", frame->buffer,
+ frame->planes_buf.planes[i].data_offset, frame->planes_buf.planes[i].length);
+ write(file_fd,
+ (uint8_t *)frame->buffer + offset,
+ frame->planes_buf.planes[i].length);
+ offset += (int)frame->planes_buf.planes[i].length;
+ }
+
+ close(file_fd);
+ LOGD("dump %s", file_name);
+ }
+ }
+}
+
+void mm_app_dump_jpeg_frame(const void * data, size_t size, char* name,
+ char* ext, uint32_t index)
+{
+ char buf[FILENAME_MAX];
+ int file_fd;
+ if ( data != NULL) {
+ snprintf(buf, sizeof(buf),
+ QCAMERA_DUMP_FRM_LOCATION"test/%s_%u.%s", name, index, ext);
+ LOGD("%s size =%zu, jobId=%u", buf, size, index);
+        file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            LOGE("cannot open file %s\n", buf);
+        } else {
+            write(file_fd, data, size);
+            close(file_fd);
+        }
+ }
+}
+
+int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+ cam_frame_len_offset_t *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t is_streambuf,
+ size_t multipleOf)
+{
+ uint32_t i, j;
+ unsigned int ion_type = 0x1 << CAMERA_ION_FALLBACK_HEAP_ID;
+
+ if (is_streambuf) {
+ ion_type |= 0x1 << CAMERA_ION_HEAP_ID;
+ }
+
+ for (i = 0; i < num_bufs ; i++) {
+ if ( 0 < multipleOf ) {
+ size_t m = frame_offset_info->frame_len / multipleOf;
+ if ( ( frame_offset_info->frame_len % multipleOf ) != 0 ) {
+ m++;
+ }
+ app_bufs[i].mem_info.size = m * multipleOf;
+ } else {
+ app_bufs[i].mem_info.size = frame_offset_info->frame_len;
+ }
+ mm_app_allocate_ion_memory(&app_bufs[i], ion_type);
+
+ app_bufs[i].buf.buf_idx = i;
+ app_bufs[i].buf.planes_buf.num_planes = (int8_t)frame_offset_info->num_planes;
+ app_bufs[i].buf.fd = app_bufs[i].mem_info.fd;
+ app_bufs[i].buf.frame_len = app_bufs[i].mem_info.size;
+ app_bufs[i].buf.buffer = app_bufs[i].mem_info.data;
+ app_bufs[i].buf.mem_info = (void *)&app_bufs[i].mem_info;
+
+        /* Plane 0 needs to be set separately; its reserved[0] offset starts
+         * at 0. The remaining planes are set in a loop, each accumulating the
+         * previous plane's offset + length into reserved[0]. */
+ app_bufs[i].buf.planes_buf.planes[0].length = frame_offset_info->mp[0].len;
+ app_bufs[i].buf.planes_buf.planes[0].m.userptr =
+ (long unsigned int)app_bufs[i].buf.fd;
+ app_bufs[i].buf.planes_buf.planes[0].data_offset = frame_offset_info->mp[0].offset;
+ app_bufs[i].buf.planes_buf.planes[0].reserved[0] = 0;
+ for (j = 1; j < (uint8_t)frame_offset_info->num_planes; j++) {
+ app_bufs[i].buf.planes_buf.planes[j].length = frame_offset_info->mp[j].len;
+ app_bufs[i].buf.planes_buf.planes[j].m.userptr =
+ (long unsigned int)app_bufs[i].buf.fd;
+ app_bufs[i].buf.planes_buf.planes[j].data_offset = frame_offset_info->mp[j].offset;
+ app_bufs[i].buf.planes_buf.planes[j].reserved[0] =
+ app_bufs[i].buf.planes_buf.planes[j-1].reserved[0] +
+ app_bufs[i].buf.planes_buf.planes[j-1].length;
+ }
+ }
+ LOGD("X");
+ return MM_CAMERA_OK;
+}
+
+int mm_app_release_bufs(uint8_t num_bufs,
+ mm_camera_app_buf_t* app_bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+
+ LOGD("E");
+
+ for (i = 0; i < num_bufs; i++) {
+ rc = mm_app_deallocate_ion_memory(&app_bufs[i]);
+ }
+ memset(app_bufs, 0, num_bufs * sizeof(mm_camera_app_buf_t));
+ LOGD("X");
+ return rc;
+}
+
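+/* Stream buffer allocation hook handed to the mm-camera interface: allocates
+ * the stream's ION buffers, maps each one to the backend via ops_tbl->map_ops,
+ * and returns the buffer array plus the initial registration flags. On any
+ * mapping failure the already-mapped buffers are unmapped and everything is
+ * released. */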
+int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+ uint8_t *num_bufs,
+ uint8_t **initial_reg_flag,
+ mm_camera_buf_def_t **bufs,
+ mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data)
+{
+ mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+ mm_camera_buf_def_t *pBufs = NULL;
+ uint8_t *reg_flags = NULL;
+ int i, rc;
+
+ stream->offset = *frame_offset_info;
+
+ LOGD("alloc buf for stream_id %d, len=%d, num planes: %d, offset: %d",
+ stream->s_id,
+ frame_offset_info->frame_len,
+ frame_offset_info->num_planes,
+ frame_offset_info->mp[1].offset);
+
+ if (stream->num_of_bufs > CAM_MAX_NUM_BUFS_PER_STREAM)
+ stream->num_of_bufs = CAM_MAX_NUM_BUFS_PER_STREAM;
+
+ pBufs = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t) * stream->num_of_bufs);
+ reg_flags = (uint8_t *)malloc(sizeof(uint8_t) * stream->num_of_bufs);
+ if (pBufs == NULL || reg_flags == NULL) {
+ LOGE("No mem for bufs");
+ if (pBufs != NULL) {
+ free(pBufs);
+ }
+ if (reg_flags != NULL) {
+ free(reg_flags);
+ }
+ return -1;
+ }
+
+ rc = mm_app_alloc_bufs(&stream->s_bufs[0],
+ frame_offset_info,
+ stream->num_of_bufs,
+ 1,
+ stream->multipleOf);
+
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_stream_alloc_bufs err = %d", rc);
+ free(pBufs);
+ free(reg_flags);
+ return rc;
+ }
+
+ for (i = 0; i < stream->num_of_bufs; i++) {
+ /* mapping stream bufs first */
+ pBufs[i] = stream->s_bufs[i].buf;
+ reg_flags[i] = 1;
+ rc = ops_tbl->map_ops(pBufs[i].buf_idx,
+ -1,
+ pBufs[i].fd,
+ (uint32_t)pBufs[i].frame_len,
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mapping buf[%d] err = %d", i, rc);
+ break;
+ }
+ }
+
+ if (rc != MM_CAMERA_OK) {
+ int j;
+ for (j=0; j>i; j++) {
+ ops_tbl->unmap_ops(pBufs[j].buf_idx, -1,
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ }
+ mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+ free(pBufs);
+ free(reg_flags);
+ return rc;
+ }
+
+ *num_bufs = stream->num_of_bufs;
+ *bufs = pBufs;
+ *initial_reg_flag = reg_flags;
+
+ LOGD("X");
+ return rc;
+}
+
+int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+ void *user_data)
+{
+ mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+ int i;
+
+ for (i = 0; i < stream->num_of_bufs ; i++) {
+ /* mapping stream bufs first */
+ ops_tbl->unmap_ops(stream->s_bufs[i].buf.buf_idx, -1,
+ CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+ }
+
+ mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+
+ LOGD("X");
+ return 0;
+}
+
+int32_t mm_app_stream_clean_invalidate_buf(uint32_t index, void *user_data)
+{
+ mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+ return mm_app_cache_ops(&stream->s_bufs[index].mem_info,
+ ION_IOC_CLEAN_INV_CACHES);
+}
+
+int32_t mm_app_stream_invalidate_buf(uint32_t index, void *user_data)
+{
+ mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+ return mm_app_cache_ops(&stream->s_bufs[index].mem_info, ION_IOC_INV_CACHES);
+}
+
+static void notify_evt_cb(uint32_t camera_handle,
+ mm_camera_event_t *evt,
+ void *user_data)
+{
+ mm_camera_test_obj_t *test_obj =
+ (mm_camera_test_obj_t *)user_data;
+ if (test_obj == NULL || test_obj->cam->camera_handle != camera_handle) {
+ LOGE("Not a valid test obj");
+ return;
+ }
+
+ LOGD("E evt = %d", evt->server_event_type);
+ switch (evt->server_event_type) {
+ case CAM_EVENT_TYPE_AUTO_FOCUS_DONE:
+ LOGD("rcvd auto focus done evt");
+ break;
+ case CAM_EVENT_TYPE_ZOOM_DONE:
+ LOGD("rcvd zoom done evt");
+ break;
+ default:
+ break;
+ }
+
+ LOGD("X");
+}
+
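+/* Opens one camera session end to end: camera_open() through the HAL vtable,
+ * allocate and map the capability and parameter ION buffers, register the
+ * event callback, query capabilities, and open the JPEG encoder client.
+ * The error labels below unwind these steps in reverse order. */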
+int mm_app_open(mm_camera_app_t *cam_app,
+ int cam_id,
+ mm_camera_test_obj_t *test_obj)
+{
+ int32_t rc = 0;
+ cam_frame_len_offset_t offset_info;
+
+ LOGD("BEGIN\n");
+
+ rc = cam_app->hal_lib.mm_camera_open((uint8_t)cam_id, &(test_obj->cam));
+ if(rc || !test_obj->cam) {
+ LOGE("dev open error. rc = %d, vtbl = %p\n", rc, test_obj->cam);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ LOGD("Open Camera id = %d handle = %d", cam_id, test_obj->cam->camera_handle);
+
+ /* alloc ion mem for capability buf */
+ memset(&offset_info, 0, sizeof(offset_info));
+ offset_info.frame_len = sizeof(cam_capability_t);
+
+ rc = mm_app_alloc_bufs(&test_obj->cap_buf,
+ &offset_info,
+ 1,
+ 0,
+ 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("alloc buf for capability error\n");
+ goto error_after_cam_open;
+ }
+
+ /* mapping capability buf */
+ rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_CAPABILITY,
+ test_obj->cap_buf.mem_info.fd,
+ test_obj->cap_buf.mem_info.size);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("map for capability error\n");
+ goto error_after_cap_buf_alloc;
+ }
+
+ /* alloc ion mem for getparm buf */
+ memset(&offset_info, 0, sizeof(offset_info));
+ offset_info.frame_len = sizeof(parm_buffer_t);
+ rc = mm_app_alloc_bufs(&test_obj->parm_buf,
+ &offset_info,
+ 1,
+ 0,
+ 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("alloc buf for getparm_buf error\n");
+ goto error_after_cap_buf_map;
+ }
+
+ /* mapping getparm buf */
+ rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_PARM_BUF,
+ test_obj->parm_buf.mem_info.fd,
+ test_obj->parm_buf.mem_info.size);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("map getparm_buf error\n");
+ goto error_after_getparm_buf_alloc;
+ }
+ test_obj->params_buffer = (parm_buffer_t*) test_obj->parm_buf.mem_info.data;
+ LOGH("\n%s params_buffer=%p\n",test_obj->params_buffer);
+
+ rc = test_obj->cam->ops->register_event_notify(test_obj->cam->camera_handle,
+ notify_evt_cb,
+ test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("failed register_event_notify");
+ rc = -MM_CAMERA_E_GENERAL;
+ goto error_after_getparm_buf_map;
+ }
+
+ rc = test_obj->cam->ops->query_capability(test_obj->cam->camera_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("failed query_capability");
+ rc = -MM_CAMERA_E_GENERAL;
+ goto error_after_getparm_buf_map;
+ }
+ memset(&test_obj->jpeg_ops, 0, sizeof(mm_jpeg_ops_t));
+ mm_dimension pic_size;
+ memset(&pic_size, 0, sizeof(mm_dimension));
+ pic_size.w = 4000;
+ pic_size.h = 3000;
+ test_obj->jpeg_hdl = cam_app->hal_lib.jpeg_open(&test_obj->jpeg_ops, NULL, pic_size, NULL);
+ if (test_obj->jpeg_hdl == 0) {
+ LOGE("jpeg lib open err");
+ rc = -MM_CAMERA_E_GENERAL;
+ goto error_after_getparm_buf_map;
+ }
+
+ return rc;
+
+error_after_getparm_buf_map:
+ test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_PARM_BUF);
+error_after_getparm_buf_alloc:
+ mm_app_release_bufs(1, &test_obj->parm_buf);
+error_after_cap_buf_map:
+ test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_CAPABILITY);
+error_after_cap_buf_alloc:
+ mm_app_release_bufs(1, &test_obj->cap_buf);
+error_after_cam_open:
+ test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+ test_obj->cam = NULL;
+ return rc;
+}
+
+int init_batch_update(parm_buffer_t *p_table)
+{
+ int rc = MM_CAMERA_OK;
+ LOGH("\nEnter %s\n");
+ int32_t hal_version = CAM_HAL_V1;
+
+ memset(p_table, 0, sizeof(parm_buffer_t));
+ if(ADD_SET_PARAM_ENTRY_TO_BATCH(p_table, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
+ rc = -1;
+ }
+
+ return rc;
+}
+
+int commit_set_batch(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ int i = 0;
+
+ for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+ if(test_obj->params_buffer->is_valid[i])
+ break;
+ }
+ if (i < CAM_INTF_PARM_MAX) {
+ LOGH("\n set_param p_buffer =%p\n",test_obj->params_buffer);
+ rc = test_obj->cam->ops->set_parms(test_obj->cam->camera_handle, test_obj->params_buffer);
+ }
+ if (rc != MM_CAMERA_OK) {
+ LOGE("cam->ops->set_parms failed !!");
+ }
+ return rc;
+}
+
+int mm_app_close(mm_camera_test_obj_t *test_obj)
+{
+ int32_t rc = MM_CAMERA_OK;
+
+ if (test_obj == NULL || test_obj->cam ==NULL) {
+ LOGE("cam not opened");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ /* unmap capability buf */
+ rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_CAPABILITY);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("unmap capability buf failed, rc=%d", rc);
+ }
+
+ /* unmap parm buf */
+ rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+ CAM_MAPPING_BUF_TYPE_PARM_BUF);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("unmap setparm buf failed, rc=%d", rc);
+ }
+
+ rc = test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("close camera failed, rc=%d", rc);
+ }
+ test_obj->cam = NULL;
+
+ /* close jpeg client */
+ if (test_obj->jpeg_hdl && test_obj->jpeg_ops.close) {
+ rc = test_obj->jpeg_ops.close(test_obj->jpeg_hdl);
+ test_obj->jpeg_hdl = 0;
+ if (rc != MM_CAMERA_OK) {
+ LOGE("close jpeg failed, rc=%d", rc);
+ }
+ }
+
+ /* dealloc capability buf */
+ rc = mm_app_release_bufs(1, &test_obj->cap_buf);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("release capability buf failed, rc=%d", rc);
+ }
+
+ /* dealloc parm buf */
+ rc = mm_app_release_bufs(1, &test_obj->parm_buf);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("release setparm buf failed, rc=%d", rc);
+ }
+
+ return MM_CAMERA_OK;
+}
+
+mm_camera_channel_t * mm_app_add_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr,
+ mm_camera_buf_notify_t channel_cb,
+ void *userdata)
+{
+ uint32_t ch_id = 0;
+ mm_camera_channel_t *channel = NULL;
+
+ ch_id = test_obj->cam->ops->add_channel(test_obj->cam->camera_handle,
+ attr,
+ channel_cb,
+ userdata);
+ if (ch_id == 0) {
+ LOGE("add channel failed");
+ return NULL;
+ }
+ channel = &test_obj->channels[ch_type];
+ channel->ch_id = ch_id;
+ return channel;
+}
+
+int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ test_obj->cam->ops->delete_channel(test_obj->cam->camera_handle,
+ channel->ch_id);
+ memset(channel, 0, sizeof(mm_camera_channel_t));
+ return MM_CAMERA_OK;
+}
+
+mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ mm_camera_stream_t *stream = NULL;
+ int rc = MM_CAMERA_OK;
+ cam_frame_len_offset_t offset_info;
+
+ stream = &(channel->streams[channel->num_streams++]);
+ stream->s_id = test_obj->cam->ops->add_stream(test_obj->cam->camera_handle,
+ channel->ch_id);
+ if (stream->s_id == 0) {
+ LOGE("add stream failed");
+ return NULL;
+ }
+
+ stream->multipleOf = test_obj->slice_size;
+
+ /* alloc ion mem for stream_info buf */
+ memset(&offset_info, 0, sizeof(offset_info));
+ offset_info.frame_len = sizeof(cam_stream_info_t);
+
+ rc = mm_app_alloc_bufs(&stream->s_info_buf,
+ &offset_info,
+ 1,
+ 0,
+ 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("alloc buf for stream_info error\n");
+ test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id);
+ stream->s_id = 0;
+ return NULL;
+ }
+
+ /* mapping streaminfo buf */
+ rc = test_obj->cam->ops->map_stream_buf(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id,
+ CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+ 0,
+ -1,
+ stream->s_info_buf.mem_info.fd,
+ (uint32_t)stream->s_info_buf.mem_info.size);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("map setparm_buf error\n");
+ mm_app_deallocate_ion_memory(&stream->s_info_buf);
+ test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id);
+ stream->s_id = 0;
+ return NULL;
+ }
+
+ return stream;
+}
+
+int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_stream_t *stream)
+{
+ test_obj->cam->ops->unmap_stream_buf(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id,
+ CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+ 0,
+ -1);
+ mm_app_deallocate_ion_memory(&stream->s_info_buf);
+ test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id);
+ memset(stream, 0, sizeof(mm_camera_stream_t));
+ return MM_CAMERA_OK;
+}
+
+mm_camera_channel_t *mm_app_get_channel_by_type(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_type_t ch_type)
+{
+ return &test_obj->channels[ch_type];
+}
+
+int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_stream_t *stream,
+ mm_camera_stream_config_t *config)
+{
+ return test_obj->cam->ops->config_stream(test_obj->cam->camera_handle,
+ channel->ch_id,
+ stream->s_id,
+ config);
+}
+
+int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ return test_obj->cam->ops->start_channel(test_obj->cam->camera_handle,
+ channel->ch_id);
+}
+
+int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ return test_obj->cam->ops->stop_channel(test_obj->cam->camera_handle,
+ channel->ch_id);
+}
+
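+/* Parameter updates use a batch pattern: initBatchUpdate() clears the shared
+ * parm_buf and stamps the HAL version, each setter adds one entry with
+ * ADD_SET_PARAM_ENTRY_TO_BATCH(), and commitSetBatch()/commitGetBatch() only
+ * call into cam->ops when at least one table entry is marked valid. */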
+int initBatchUpdate(mm_camera_test_obj_t *test_obj)
+{
+ int32_t hal_version = CAM_HAL_V1;
+
+ parm_buffer_t *parm_buf = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+ memset(parm_buf, 0, sizeof(parm_buffer_t));
+ ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_HAL_VERSION, hal_version);
+
+ return MM_CAMERA_OK;
+}
+
+int commitSetBatch(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ int i = 0;
+
+ parm_buffer_t *p_table = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+ for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+ if(p_table->is_valid[i])
+ break;
+ }
+ if (i < CAM_INTF_PARM_MAX) {
+ rc = test_obj->cam->ops->set_parms(test_obj->cam->camera_handle, p_table);
+ }
+ return rc;
+}
+
+
+int commitGetBatch(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ int i = 0;
+ parm_buffer_t *p_table = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+ for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+ if(p_table->is_valid[i])
+ break;
+ }
+ if (i < CAM_INTF_PARM_MAX) {
+ rc = test_obj->cam->ops->get_parms(test_obj->cam->camera_handle, p_table);
+ }
+ return rc;
+}
+
+int setAecLock(mm_camera_test_obj_t *test_obj, int value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_AEC_LOCK, (uint32_t)value)) {
+ LOGE("AEC Lock parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setAwbLock(mm_camera_test_obj_t *test_obj, int value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_AWB_LOCK, (uint32_t)value)) {
+ LOGE("AWB Lock parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+
+int set3Acommand(mm_camera_test_obj_t *test_obj, cam_eztune_cmd_data_t *value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_EZTUNE_CMD, *value)) {
+ LOGE("CAM_INTF_PARM_EZTUNE_CMD parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setAutoFocusTuning(mm_camera_test_obj_t *test_obj, tune_actuator_t *value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_SET_AUTOFOCUSTUNING, *value)) {
+ LOGE("AutoFocus Tuning not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setVfeCommand(mm_camera_test_obj_t *test_obj, tune_cmd_t *value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_SET_VFE_COMMAND, *value)) {
+ LOGE("VFE Command not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setmetainfoCommand(mm_camera_test_obj_t *test_obj, cam_stream_size_info_t *value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_META_STREAM_INFO, *value)) {
+ LOGE("PP Command not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+
+int setPPCommand(mm_camera_test_obj_t *test_obj, tune_cmd_t *value)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_SET_PP_COMMAND, *value)) {
+ LOGE("PP Command not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setFocusMode(mm_camera_test_obj_t *test_obj, cam_focus_mode_type mode)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ uint32_t value = mode;
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_FOCUS_MODE, value)) {
+ LOGE("Focus mode parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setEVCompensation(mm_camera_test_obj_t *test_obj, int ev)
+{
+ int rc = MM_CAMERA_OK;
+
+ cam_capability_t *camera_cap = NULL;
+
+ camera_cap = (cam_capability_t *) test_obj->cap_buf.mem_info.data;
+ if ( (ev >= camera_cap->exposure_compensation_min) &&
+ (ev <= camera_cap->exposure_compensation_max) ) {
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_EXPOSURE_COMPENSATION, ev)) {
+ LOGE("EV compensation parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("EV compensation set to: %d", ev);
+ } else {
+ LOGE("Invalid EV compensation");
+ return -EINVAL;
+ }
+
+ERROR:
+ return rc;
+}
+
+int setAntibanding(mm_camera_test_obj_t *test_obj, cam_antibanding_mode_type antibanding)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_ANTIBANDING, antibanding)) {
+ LOGE("Antibanding parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Antibanding set to: %d", (int)antibanding);
+
+ERROR:
+ return rc;
+}
+
+int setWhiteBalance(mm_camera_test_obj_t *test_obj, cam_wb_mode_type mode)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_WHITE_BALANCE, mode)) {
+ LOGE("White balance parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("White balance set to: %d", (int)mode);
+
+ERROR:
+ return rc;
+}
+
+int setExposureMetering(mm_camera_test_obj_t *test_obj, cam_auto_exposure_mode_type mode)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_EXPOSURE, mode)) {
+ LOGE("Exposure metering parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Exposure metering set to: %d", (int)mode);
+
+ERROR:
+ return rc;
+}
+
+int setBrightness(mm_camera_test_obj_t *test_obj, int brightness)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_BRIGHTNESS, brightness)) {
+ LOGE("Brightness parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Brightness set to: %d", brightness);
+
+ERROR:
+ return rc;
+}
+
+int setContrast(mm_camera_test_obj_t *test_obj, int contrast)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_CONTRAST, contrast)) {
+ LOGE("Contrast parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Contrast set to: %d", contrast);
+
+ERROR:
+ return rc;
+}
+
+int setTintless(mm_camera_test_obj_t *test_obj, int tintless)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_TINTLESS, tintless)) {
+ LOGE("Tintless parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("set Tintless to: %d", tintless);
+
+ERROR:
+ return rc;
+}
+
+int setSaturation(mm_camera_test_obj_t *test_obj, int saturation)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_SATURATION, saturation)) {
+ LOGE("Saturation parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Saturation set to: %d", saturation);
+
+ERROR:
+ return rc;
+}
+
+int setSharpness(mm_camera_test_obj_t *test_obj, int sharpness)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_SHARPNESS, sharpness)) {
+ LOGE("Sharpness parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ test_obj->reproc_sharpness = sharpness;
+ LOGE("Sharpness set to: %d", sharpness);
+
+ERROR:
+ return rc;
+}
+
+int setISO(mm_camera_test_obj_t *test_obj, cam_iso_mode_type iso)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ cam_intf_parm_manual_3a_t iso_settings;
+ memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+ iso_settings.previewOnly = FALSE;
+ iso_settings.value = (uint64_t)iso;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_ISO, iso_settings)) {
+ LOGE("ISO parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("ISO set to: %d", (int)iso);
+
+ERROR:
+ return rc;
+}
+
+int setZoom(mm_camera_test_obj_t *test_obj, int zoom)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_ZOOM, zoom)) {
+ LOGE("Zoom parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Zoom set to: %d", zoom);
+
+ERROR:
+ return rc;
+}
+
+int setFPSRange(mm_camera_test_obj_t *test_obj, cam_fps_range_t range)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_FPS_RANGE, range)) {
+ LOGE("FPS range parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("FPS Range set to: [%5.2f:%5.2f]",
+ range.min_fps,
+ range.max_fps);
+
+ERROR:
+ return rc;
+}
+
+int setScene(mm_camera_test_obj_t *test_obj, cam_scene_mode_type scene)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_BESTSHOT_MODE, scene)) {
+ LOGE("Scene parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Scene set to: %d", (int)scene);
+
+ERROR:
+ return rc;
+}
+
+int setFlash(mm_camera_test_obj_t *test_obj, cam_flash_mode_t flash)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_LED_MODE, flash)) {
+ LOGE("Flash parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ LOGE("Flash set to: %d", (int)flash);
+
+ERROR:
+ return rc;
+}
+
+int setWNR(mm_camera_test_obj_t *test_obj, uint8_t enable)
+{
+ int rc = MM_CAMERA_OK;
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ cam_denoise_param_t param;
+ memset(&param, 0, sizeof(cam_denoise_param_t));
+ param.denoise_enable = enable;
+ param.process_plates = CAM_WAVELET_DENOISE_YCBCR_PLANE;
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_WAVELET_DENOISE, param)) {
+ LOGE("WNR enabled parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+
+ test_obj->reproc_wnr = param;
+ LOGE("WNR enabled: %d", enable);
+
+ERROR:
+ return rc;
+}
+
+
+/** tuneserver_capture
+ * @lib_handle: the camera handle object
+ * @dim: snapshot dimensions
+ *
+ * Takes a JPEG snapshot (ZSL or regular capture), restarting streaming as needed
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_capture(mm_camera_lib_handle *lib_handle,
+ mm_camera_lib_snapshot_params *dim)
+{
+ int rc = 0;
+
+ printf("Take jpeg snapshot\n");
+ if ( lib_handle->stream_running ) {
+
+ if ( lib_handle->test_obj.zsl_enabled) {
+ if ( NULL != dim) {
+ if ( ( lib_handle->test_obj.buffer_width != dim->width) ||
+                     ( lib_handle->test_obj.buffer_height != dim->height ) ) {
+
+ lib_handle->test_obj.buffer_width = dim->width;
+ lib_handle->test_obj.buffer_height = dim->height;
+
+ rc = mm_camera_lib_stop_stream(lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_stop_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ rc = mm_camera_lib_start_stream(lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_start_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+
+ }
+
+ lib_handle->test_obj.encodeJpeg = 1;
+
+ mm_camera_app_wait();
+ } else {
+ // For standard 2D capture streaming has to be disabled first
+ rc = mm_camera_lib_stop_stream(lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_stop_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ if ( NULL != dim ) {
+ lib_handle->test_obj.buffer_width = dim->width;
+ lib_handle->test_obj.buffer_height = dim->height;
+ }
+ rc = mm_app_start_capture(&lib_handle->test_obj, 1);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_capture() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ mm_camera_app_wait();
+
+ rc = mm_app_stop_capture(&lib_handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_capture() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ // Restart streaming after capture is done
+ rc = mm_camera_lib_start_stream(lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_start_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ }
+
+EXIT:
+
+ return rc;
+}
+
+int mm_app_start_regression_test(int run_tc)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_t my_cam_app;
+
+ LOGD("\nCamera Test Application\n");
+ memset(&my_cam_app, 0, sizeof(mm_camera_app_t));
+
+ rc = mm_app_load_hal(&my_cam_app);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_load_hal failed !!");
+ return rc;
+ }
+
+ if(run_tc) {
+ rc = mm_app_unit_test_entry(&my_cam_app);
+ return rc;
+ }
+#if 0
+ if(run_dual_tc) {
+ printf("\tRunning Dual camera test engine only\n");
+ rc = mm_app_dual_test_entry(&my_cam_app);
+ printf("\t Dual camera engine. EXIT(%d)!!!\n", rc);
+ exit(rc);
+ }
+#endif
+ return rc;
+}
+
+int32_t mm_camera_load_tuninglibrary(mm_camera_tuning_lib_params_t *tuning_param)
+{
+ void *(*tuning_open_lib)(void) = NULL;
+
+ LOGD("E");
+ tuning_param->lib_handle = dlopen("libmmcamera_tuning.so", RTLD_NOW);
+ if (!tuning_param->lib_handle) {
+ LOGE("Failed opening libmmcamera_tuning.so\n");
+ return -EINVAL;
+ }
+
+ *(void **)&tuning_open_lib = dlsym(tuning_param->lib_handle,
+ "open_tuning_lib");
+ if (!tuning_open_lib) {
+ LOGE("Failed symbol libmmcamera_tuning.so\n");
+ return -EINVAL;
+ }
+
+ if (tuning_param->func_tbl) {
+ LOGE("already loaded tuninglib..");
+ return 0;
+ }
+
+ tuning_param->func_tbl = (mm_camera_tune_func_t *)tuning_open_lib();
+ if (!tuning_param->func_tbl) {
+ LOGE("Failed opening library func table ptr\n");
+ return -EINVAL;
+ }
+
+ LOGD("X");
+ return 0;
+}
+
+int mm_camera_lib_open(mm_camera_lib_handle *handle, int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ memset(handle, 0, sizeof(mm_camera_lib_handle));
+ rc = mm_app_load_hal(&handle->app_ctx);
+ if( MM_CAMERA_OK != rc ) {
+ LOGE("mm_app_init err\n");
+ goto EXIT;
+ }
+
+ handle->test_obj.buffer_width = DEFAULT_PREVIEW_WIDTH;
+ handle->test_obj.buffer_height = DEFAULT_PREVIEW_HEIGHT;
+ handle->test_obj.buffer_format = DEFAULT_SNAPSHOT_FORMAT;
+ handle->current_params.stream_width = DEFAULT_SNAPSHOT_WIDTH;
+ handle->current_params.stream_height = DEFAULT_SNAPSHOT_HEIGHT;
+ handle->current_params.af_mode = CAM_FOCUS_MODE_AUTO; // Default to auto focus mode
+ rc = mm_app_open(&handle->app_ctx, (uint8_t)cam_id, &handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ cam_id, rc);
+ goto EXIT;
+ }
+
+ //rc = mm_app_initialize_fb(&handle->test_obj);
+ rc = MM_CAMERA_OK;
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_initialize_fb() cam_idx=%d, err=%d\n",
+ cam_id, rc);
+ goto EXIT;
+ }
+
+EXIT:
+
+ return rc;
+}
+
+int mm_camera_lib_start_stream(mm_camera_lib_handle *handle)
+{
+ int rc = MM_CAMERA_OK;
+ cam_capability_t camera_cap;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ if ( handle->test_obj.zsl_enabled ) {
+ rc = mm_app_start_preview_zsl(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_preview_zsl() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ } else {
+ handle->test_obj.enable_reproc = ENABLE_REPROCESSING;
+ rc = mm_app_start_preview(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_preview() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+
+ // Configure focus mode after stream starts
+ rc = mm_camera_lib_get_caps(handle, &camera_cap);
+ if ( MM_CAMERA_OK != rc ) {
+ LOGE("mm_camera_lib_get_caps() err=%d\n", rc);
+ return -1;
+ }
+ if (camera_cap.supported_focus_modes_cnt == 1 &&
+ camera_cap.supported_focus_modes[0] == CAM_FOCUS_MODE_FIXED) {
+ LOGD("focus not supported");
+ handle->test_obj.focus_supported = 0;
+ handle->current_params.af_mode = CAM_FOCUS_MODE_FIXED;
+ } else {
+ handle->test_obj.focus_supported = 1;
+ }
+ rc = setFocusMode(&handle->test_obj, handle->current_params.af_mode);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("autofocus error\n");
+ goto EXIT;
+ }
+ handle->stream_running = 1;
+
+EXIT:
+ return rc;
+}
+
+int mm_camera_lib_stop_stream(mm_camera_lib_handle *handle)
+{
+ int rc = MM_CAMERA_OK;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ if ( handle->test_obj.zsl_enabled ) {
+ rc = mm_app_stop_preview_zsl(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_preview_zsl() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ } else {
+ rc = mm_app_stop_preview(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_preview() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+
+ handle->stream_running = 0;
+
+EXIT:
+ return rc;
+}
+
+int mm_camera_lib_get_caps(mm_camera_lib_handle *handle,
+ cam_capability_t *caps)
+{
+ int rc = MM_CAMERA_OK;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ if ( NULL == caps ) {
+ LOGE(" Invalid capabilities structure");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ *caps = *( (cam_capability_t *) handle->test_obj.cap_buf.mem_info.data );
+
+EXIT:
+
+ return rc;
+}
+
+
+int mm_camera_lib_send_command(mm_camera_lib_handle *handle,
+ mm_camera_lib_commands cmd,
+ void *in_data,
+ __unused void *out_data)
+{
+ uint32_t width, height;
+ int rc = MM_CAMERA_OK;
+ cam_capability_t *camera_cap = NULL;
+ mm_camera_lib_snapshot_params *dim = NULL;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ camera_cap = (cam_capability_t *) handle->test_obj.cap_buf.mem_info.data;
+
+ switch(cmd) {
+ case MM_CAMERA_LIB_FPS_RANGE:
+ if ( NULL != in_data ) {
+ cam_fps_range_t range = *(( cam_fps_range_t * )in_data);
+ rc = setFPSRange(&handle->test_obj, range);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setFPSRange() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_FLASH:
+ if ( NULL != in_data ) {
+ cam_flash_mode_t flash = *(( int * )in_data);
+ rc = setFlash(&handle->test_obj, flash);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setFlash() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_BESTSHOT:
+ if ( NULL != in_data ) {
+ cam_scene_mode_type scene = *(( int * )in_data);
+ rc = setScene(&handle->test_obj, scene);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setScene() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_ZOOM:
+ if ( NULL != in_data ) {
+ int zoom = *(( int * )in_data);
+ rc = setZoom(&handle->test_obj, zoom);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setZoom() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_ISO:
+ if ( NULL != in_data ) {
+ cam_iso_mode_type iso = *(( int * )in_data);
+ rc = setISO(&handle->test_obj, iso);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setISO() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_SHARPNESS:
+ if ( NULL != in_data ) {
+ int sharpness = *(( int * )in_data);
+ rc = setSharpness(&handle->test_obj, sharpness);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setSharpness() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_SATURATION:
+ if ( NULL != in_data ) {
+ int saturation = *(( int * )in_data);
+ rc = setSaturation(&handle->test_obj, saturation);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setSaturation() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_CONTRAST:
+ if ( NULL != in_data ) {
+ int contrast = *(( int * )in_data);
+ rc = setContrast(&handle->test_obj, contrast);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setContrast() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_SET_TINTLESS:
+ if ( NULL != in_data ) {
+ int tintless = *(( int * )in_data);
+ rc = setTintless(&handle->test_obj, tintless);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("enlabe/disable:%d tintless() err=%d\n",
+ tintless, rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_BRIGHTNESS:
+ if ( NULL != in_data ) {
+ int brightness = *(( int * )in_data);
+ rc = setBrightness(&handle->test_obj, brightness);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setBrightness() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_EXPOSURE_METERING:
+ if ( NULL != in_data ) {
+ cam_auto_exposure_mode_type exp = *(( int * )in_data);
+ rc = setExposureMetering(&handle->test_obj, exp);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setExposureMetering() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_WB:
+ if ( NULL != in_data ) {
+ cam_wb_mode_type wb = *(( int * )in_data);
+ rc = setWhiteBalance(&handle->test_obj, wb);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setWhiteBalance() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_ANTIBANDING:
+ if ( NULL != in_data ) {
+ int antibanding = *(( int * )in_data);
+ rc = setAntibanding(&handle->test_obj, antibanding);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setAntibanding() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_EV:
+ if ( NULL != in_data ) {
+ int ev = *(( int * )in_data);
+ rc = setEVCompensation(&handle->test_obj, ev);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("setEVCompensation() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_ZSL_ENABLE:
+ if ( NULL != in_data) {
+ int enable_zsl = *(( int * )in_data);
+ if ( ( enable_zsl != handle->test_obj.zsl_enabled ) &&
+ handle->stream_running ) {
+ rc = mm_camera_lib_stop_stream(handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_stop_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ handle->test_obj.zsl_enabled = enable_zsl;
+ rc = mm_camera_lib_start_stream(handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_start_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+ } else {
+ handle->test_obj.zsl_enabled = enable_zsl;
+ }
+ }
+ break;
+ case MM_CAMERA_LIB_RAW_CAPTURE:
+
+ if ( 0 == handle->stream_running ) {
+ LOGE(" Streaming is not enabled!");
+ rc = MM_CAMERA_E_INVALID_OPERATION;
+ goto EXIT;
+ }
+
+ rc = mm_camera_lib_stop_stream(handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_stop_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ width = handle->test_obj.buffer_width;
+ height = handle->test_obj.buffer_height;
+ handle->test_obj.buffer_width =
+ (uint32_t)camera_cap->raw_dim[0].width;
+ handle->test_obj.buffer_height =
+ (uint32_t)camera_cap->raw_dim[0].height;
+ handle->test_obj.buffer_format = DEFAULT_RAW_FORMAT;
+ LOGE("MM_CAMERA_LIB_RAW_CAPTURE %dx%d\n",
+ camera_cap->raw_dim[0].width,
+ camera_cap->raw_dim[0].height);
+ rc = mm_app_start_capture_raw(&handle->test_obj, 1);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_capture() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ mm_camera_app_wait();
+
+ rc = mm_app_stop_capture_raw(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_capture() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ handle->test_obj.buffer_width = width;
+ handle->test_obj.buffer_height = height;
+ handle->test_obj.buffer_format = DEFAULT_SNAPSHOT_FORMAT;
+ rc = mm_camera_lib_start_stream(handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_start_stream() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ break;
+
+ case MM_CAMERA_LIB_JPEG_CAPTURE:
+ if ( 0 == handle->stream_running ) {
+ LOGE(" Streaming is not enabled!");
+ rc = MM_CAMERA_E_INVALID_OPERATION;
+ goto EXIT;
+ }
+
+ if ( NULL != in_data ) {
+ dim = ( mm_camera_lib_snapshot_params * ) in_data;
+ }
+
+ rc = tuneserver_capture(handle, dim);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("capture error %d\n", rc);
+ goto EXIT;
+ }
+ break;
+
+ case MM_CAMERA_LIB_SET_FOCUS_MODE: {
+ cam_focus_mode_type mode = *((cam_focus_mode_type *)in_data);
+ handle->current_params.af_mode = mode;
+ rc = setFocusMode(&handle->test_obj, mode);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("autofocus error\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_DO_AF:
+ if (handle->test_obj.focus_supported) {
+ rc = handle->test_obj.cam->ops->do_auto_focus(handle->test_obj.cam->camera_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("autofocus error\n");
+ goto EXIT;
+ }
+ /*Waiting for Auto Focus Done Call Back*/
+ mm_camera_app_wait();
+ }
+ break;
+
+ case MM_CAMERA_LIB_CANCEL_AF:
+ rc = handle->test_obj.cam->ops->cancel_auto_focus(handle->test_obj.cam->camera_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("autofocus error\n");
+ goto EXIT;
+ }
+
+ break;
+
+ case MM_CAMERA_LIB_LOCK_AWB:
+ rc = setAwbLock(&handle->test_obj, 1);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("AWB locking failed\n");
+ goto EXIT;
+ }
+ break;
+
+ case MM_CAMERA_LIB_UNLOCK_AWB:
+ rc = setAwbLock(&handle->test_obj, 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("AE unlocking failed\n");
+ goto EXIT;
+ }
+ break;
+
+ case MM_CAMERA_LIB_LOCK_AE:
+ rc = setAecLock(&handle->test_obj, 1);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("AE locking failed\n");
+ goto EXIT;
+ }
+ break;
+
+ case MM_CAMERA_LIB_UNLOCK_AE:
+ rc = setAecLock(&handle->test_obj, 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("AE unlocking failed\n");
+ goto EXIT;
+ }
+ break;
+
+ case MM_CAMERA_LIB_SET_3A_COMMAND: {
+ rc = set3Acommand(&handle->test_obj, (cam_eztune_cmd_data_t *)in_data);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("3A set command error\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_SET_AUTOFOCUS_TUNING: {
+ rc = setAutoFocusTuning(&handle->test_obj, in_data);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Set AF tuning failed\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_SET_VFE_COMMAND: {
+ rc = setVfeCommand(&handle->test_obj, in_data);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Set vfe command failed\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_SET_POSTPROC_COMMAND: {
+ rc = setPPCommand(&handle->test_obj, in_data);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Set pp command failed\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_WNR_ENABLE: {
+ rc = setWNR(&handle->test_obj, *((uint8_t *)in_data));
+ if ( rc != MM_CAMERA_OK) {
+ LOGE("Set wnr enable failed\n");
+ goto EXIT;
+ }
+ break;
+ }
+
+ case MM_CAMERA_LIB_NO_ACTION:
+ default:
+ break;
+ };
+
+EXIT:
+
+ return rc;
+}
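+
+/** mm_camera_lib_number_of_cameras
+ *  @handle: camera test library handle
+ *
+ *  Return: number of detected cameras, 0 on invalid handle.
+ **/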
+int mm_camera_lib_number_of_cameras(mm_camera_lib_handle *handle)
+{
+ int rc = 0;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ goto EXIT;
+ }
+
+ rc = handle->app_ctx.num_cameras;
+
+EXIT:
+
+ return rc;
+}
+
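+/** mm_camera_lib_close
+ *  @handle: camera test library handle
+ *
+ *  Closes the camera test object via mm_app_close() (the
+ *  framebuffer close path is currently stubbed out).
+ *
+ *  Return: MM_CAMERA_OK on success, error code otherwise.
+ **/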
+int mm_camera_lib_close(mm_camera_lib_handle *handle)
+{
+ int rc = MM_CAMERA_OK;
+
+ if ( NULL == handle ) {
+ LOGE(" Invalid handle");
+ rc = MM_CAMERA_E_INVALID_INPUT;
+ goto EXIT;
+ }
+
+ //rc = mm_app_close_fb(&handle->test_obj);
+ rc = MM_CAMERA_OK;
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close_fb() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+ rc = mm_app_close(&handle->test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n",
+ rc);
+ goto EXIT;
+ }
+
+EXIT:
+ return rc;
+}
+
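+/** mm_camera_lib_set_preview_usercb
+ *  @handle: camera test library handle
+ *  @cb: user preview frame callback
+ *
+ *  Registers a user preview callback; fails if one is already set.
+ *
+ *  Return: 0 on success, -1 if a callback was already registered.
+ **/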
+int mm_camera_lib_set_preview_usercb(
+ mm_camera_lib_handle *handle, cam_stream_user_cb cb)
+{
+ if (handle->test_obj.user_preview_cb != NULL) {
+ LOGE(" already set preview callbacks\n");
+ return -1;
+ }
+ handle->test_obj.user_preview_cb = *cb;
+ return 0;
+}
+
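+/** mm_app_set_preview_fps_range
+ *  @test_obj: camera test object
+ *  @fpsRange: requested preview fps range
+ *
+ *  Thin wrapper that forwards the range to setFPSRange().
+ *
+ *  Return: MM_CAMERA_OK on success, error code otherwise.
+ **/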
+int mm_app_set_preview_fps_range(mm_camera_test_obj_t *test_obj,
+ cam_fps_range_t *fpsRange)
+{
+ int rc = MM_CAMERA_OK;
+ LOGH("preview fps range: min=%f, max=%f.",
+ fpsRange->min_fps, fpsRange->max_fps);
+ rc = setFPSRange(test_obj, *fpsRange);
+
+ if (rc != MM_CAMERA_OK) {
+ LOGE("add_parm_entry_tobatch failed !!");
+ return rc;
+ }
+
+ return rc;
+}
+
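+/** mm_app_set_face_detection
+ *  @test_obj: camera test object
+ *  @fd_set_parm: face detection mode and max face count
+ *
+ *  Adds CAM_INTF_PARM_FD to a parameter batch and commits it.
+ *
+ *  Return: MM_CAMERA_OK on success, error code otherwise.
+ **/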
+int mm_app_set_face_detection(mm_camera_test_obj_t *test_obj,
+ cam_fd_set_parm_t *fd_set_parm)
+{
+ int rc = MM_CAMERA_OK;
+
+ if (test_obj == NULL || fd_set_parm == NULL) {
+ LOGE(" invalid params!");
+ return MM_CAMERA_E_INVALID_INPUT;
+ }
+
+ LOGH("mode = %d, num_fd = %d",
+ fd_set_parm->fd_mode, fd_set_parm->num_fd);
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_FD, *fd_set_parm)) {
+ LOGE("FD parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
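+/** mm_app_set_flash_mode
+ *  @test_obj: camera test object
+ *  @flashMode: requested LED/flash mode
+ *
+ *  Adds CAM_INTF_PARM_LED_MODE to a parameter batch and commits it.
+ *
+ *  Return: MM_CAMERA_OK on success, error code otherwise.
+ **/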
+int mm_app_set_flash_mode(mm_camera_test_obj_t *test_obj,
+ cam_flash_mode_t flashMode)
+{
+ int rc = MM_CAMERA_OK;
+
+ if (test_obj == NULL) {
+ LOGE(" invalid params!");
+ return MM_CAMERA_E_INVALID_INPUT;
+ }
+
+ LOGH("mode = %d", (int)flashMode);
+
+ rc = initBatchUpdate(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch camera parameter update failed\n");
+ goto ERROR;
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+ CAM_INTF_PARM_LED_MODE, flashMode)) {
+ LOGE("Flash mode parameter not added to batch\n");
+ rc = -1;
+ goto ERROR;
+ }
+
+ rc = commitSetBatch(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("Batch parameters commit failed\n");
+ goto ERROR;
+ }
+
+ERROR:
+ return rc;
+}
+
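+/** mm_app_set_metadata_usercb
+ *  @test_obj: camera test object
+ *  @usercb: user metadata callback
+ *
+ *  Stores the user metadata callback, replacing any existing one.
+ *
+ *  Return: 0 on success, MM_CAMERA_E_INVALID_INPUT on bad args.
+ **/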
+int mm_app_set_metadata_usercb(mm_camera_test_obj_t *test_obj,
+ cam_stream_user_cb usercb)
+{
+ if (test_obj == NULL || usercb == NULL) {
+ LOGE(" invalid params!");
+ return MM_CAMERA_E_INVALID_INPUT;
+ }
+
+ LOGH("%s, set user metadata callback, addr: %p\n", usercb);
+
+ if (test_obj->user_metadata_cb != NULL) {
+ LOGH("%s, already set user metadata callback");
+ }
+ test_obj->user_metadata_cb = usercb;
+
+ return 0;
+}
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c
new file mode 100644
index 0000000..45fb7a8
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c
@@ -0,0 +1,291 @@
+/* Copyright (c) 2012-2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// Camera dependencies
+#include "mm_qcamera_commands.h"
+#include "mm_qcamera_dbg.h"
+
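+/** tuneserver_initialize_prevtuningp
+ *  @ctrl: mm_camera_lib_handle of the active camera
+ *  @pr_client_socket_id: preview tuning client socket id
+ *  @dimension: preview frame dimensions
+ *  @send_buf: response buffer returned to the client
+ *  @send_len: length of the response buffer
+ *
+ *  Initializes the preview tuning protocol in the loaded tuning
+ *  library (INIT followed by SETDIM) and registers the library's
+ *  preview frame callback with the camera test library.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/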
+int tuneserver_initialize_prevtuningp(void * ctrl,
+ int pr_client_socket_id, cam_dimension_t dimension,
+ char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ mm_camera_lib_handle *lib_handle = (mm_camera_lib_handle *) ctrl;
+ tuningserver_t *tctrl = &lib_handle->tsctrl;
+
+ LOGD("E");
+ if (tctrl->tuning_params.func_tbl->prevcommand_process == NULL) {
+ LOGE("prevcommand_process is NULL");
+ return -1;
+ }
+
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ NULL, TUNE_PREVCMD_INIT, (void *)&pr_client_socket_id,
+ send_buf, send_len);
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ NULL, TUNE_PREVCMD_SETDIM, (void *)&dimension,
+ send_buf, send_len);
+
+ mm_camera_lib_set_preview_usercb(lib_handle,
+ (tctrl->tuning_params.func_tbl->prevframe_callback));
+
+ return result;
+}
+
+int tuneserver_deinitialize_prevtuningp(void * ctrl,
+ char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ &tctrl->pr_proto, TUNE_PREVCMD_DEINIT, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_preview_getinfo(void * ctrl, char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ &tctrl->pr_proto, TUNE_PREVCMD_GETINFO, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_preview_getchunksize(void * ctrl,
+ char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ &tctrl->pr_proto, TUNE_PREVCMD_GETCHUNKSIZE,
+ (void *)&tctrl->pr_proto->new_cnk_size, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_preview_getframe(void * ctrl,
+ char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ &tctrl->pr_proto, TUNE_PREVCMD_GETFRAME, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_preview_unsupported(void * ctrl,
+ char **send_buf, uint32_t *send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->prevcommand_process(
+ &tctrl->pr_proto, TUNE_PREVCMD_UNSUPPORTED, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_initialize_tuningp(void * ctrl, int client_socket_id,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ mm_camera_lib_handle *lib_handle = (mm_camera_lib_handle *) ctrl;
+ tuningserver_t *tctrl = &lib_handle->tsctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->command_process(
+ lib_handle, TUNE_CMD_INIT, &client_socket_id, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_deinitialize_tuningp(void * ctrl, int client_socket_id,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+
+ result = tctrl->tuning_params.func_tbl->command_process(
+ NULL, TUNE_CMD_DEINIT, &client_socket_id, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_process_get_list_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->command_process(
+ recv_cmd, TUNE_CMD_GET_LIST, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_process_get_params_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->command_process
+ (recv_cmd, TUNE_CMD_GET_PARAMS, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_process_set_params_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->command_process(
+ recv_cmd, TUNE_CMD_SET_PARAMS, NULL, send_buf, send_len);
+
+ return result;
+}
+
+int tuneserver_process_misc_cmd(void * ctrl, void *recv_cmd,
+ char *send_buf, uint32_t send_len)
+{
+ int result = 0;
+ tuningserver_t *tctrl = (tuningserver_t *) ctrl;
+
+ LOGD("E");
+ result = tctrl->tuning_params.func_tbl->command_process(
+ recv_cmd, TUNE_CMD_MISC, NULL, send_buf, send_len);
+
+ return result;
+}
+
+/** tuneserver_close_cam
+ * @lib_handle: the camera handle object
+ *
+ * closes the camera
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_close_cam(mm_camera_lib_handle *lib_handle)
+{
+ int result = 0;
+
+ result = mm_camera_lib_close(lib_handle);
+ if (result < 0) {
+ printf(" Camera close failed\n");
+ } else {
+ printf("Camera is closed \n");
+ }
+ return result;
+}
+#if 0
+/** tuneserver_start_cam
+ * @lib_handle: the camera handle object
+ *
+ * starts the camera
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+static int tuneserver_start_cam(mm_camera_lib_handle *lib_handle)
+{
+ int result = 0;
+
+ result = mm_camera_lib_start_stream(lib_handle);
+ if (result < 0) {
+ printf(" Camera start failed\n");
+ goto error1;
+ }
+ return result;
+error1:
+ mm_camera_lib_close(lib_handle);
+ return result;
+}
+#endif
+
+/** tuneserver_stop_cam
+ * @lib_handle: the camera handle object
+ *
+ * stops the camera
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_stop_cam(mm_camera_lib_handle *lib_handle)
+{
+ int result = 0;
+
+ result = mm_camera_lib_stop_stream(lib_handle);
+ if (result < 0) {
+ printf(" Camera stop failed\n");
+ }
+// result = mm_camera_lib_close(lib_handle);
+ return result;
+}
+
+/** tuneserver_open_cam
+ * @lib_handle: the camera handle object
+ *
+ * opens the camera
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+#if 1
+int tuneserver_open_cam(mm_camera_lib_handle *lib_handle)
+{
+ int result = 0;
+
+ LOGD("E");
+ result = mm_camera_load_tuninglibrary(&lib_handle->tsctrl.tuning_params);
+ if (result < 0) {
+ LOGE(" tuning library open failed\n");
+ }
+ return result;
+}
+#endif
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
new file mode 100644
index 0000000..564c474
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
@@ -0,0 +1,1933 @@
+/*
+Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "mm_qcamera_unit_test.h"
+#include "mm_camera_dbg.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+#define MM_QCAMERA_APP_WAIT_TIME 1000000000
+
+extern int system_dimension_set(int cam_id);
+extern int stopPreview(int cam_id);
+extern int takePicture_yuv(int cam_id);
+extern int takePicture_rdi(int cam_id);
+extern int startRdi(int cam_id);
+extern int stopRdi(int cam_id);
+extern int startStats(int cam_id);
+extern int stopStats(int cam_id);
+
+
+/*
+* 1. open back
+* 2. open front
+* 3. start back
+* 4. start front
+* 5. stop back
+* 6. stop front
+* 7. close back
+* 8. close front
+* 9. take picture
+* a. start recording
+* b. stop recording
+* c. take picture rdi
+*/
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+static int num_test_cases = 0;
+struct test_case_params {
+ uint16_t launch;
+ uint16_t preview;
+ uint16_t recording;
+ uint16_t snapshot;
+};
+
+/* Test case 12436857 :*/
+
+int mm_app_dtc_0(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 0...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL stop camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close front camera\n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ sleep(1);
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close back camera \n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 12436587 :*/
+
+int mm_app_dtc_1(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 1...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera Preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+ LOGE("DUAL stop camera Preview for front \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close front camera\n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL close back camera \n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 12436578 :*/
+
+int mm_app_dtc_2(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 2...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera Preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+ LOGE("DUAL stop camera Preview for front \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close back camera \n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL close front camera\n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 241395768 : 1357 * 3, This is performed three times
+* And for each iteration 9 is performed thrice */
+
+int mm_app_dtc_3(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j,k;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview and snapshot on back Camera and RDI on Front camera 3...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for front \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() frontcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(10*1000);
+
+ for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++) {
+ LOGE("DUAL open back camera %d \n",k);
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ LOGE("DUAL take picture for back \n");
+ if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+ LOGE(" TakePicture() err=%d\n", rc);
+ break;
+ }
+ mm_camera_app_wait();
+
+ }
+ usleep(10*1000);
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGE(" stopPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close back camera\n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ usleep(20*1000);
+ }
+ LOGE("DUAL stop camera Preview for Rdi \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGD(" stopRdi() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close front camera \n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 2413ab5768 : 1357 * 3, This is performed three times
+* And for each iteration ab is performed thrice */
+
+int mm_app_dtc_4(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j,k;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 4...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for front \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() frontcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(20*1000);
+
+ for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+ LOGE("DUAL open back camera %d \n",k);
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(30*1000);
+
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ LOGE("DUAL start camera record for back \n");
+ if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+ LOGE(" StartVideorecording() err=%d\n", rc);
+ break;
+ }
+
+ mm_camera_app_wait();
+ usleep(15*1000);
+ LOGE("DUAL stop camera record for back \n");
+ if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+ LOGE(" Stopvideorecording() err=%d\n", rc);
+ break;
+ }
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGE(" stopPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close back camera\n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ usleep(20*1000);
+ }
+ LOGE("DUAL stop camera Preview for Rdi \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGD(" stopRdi() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close front camera \n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 24135768 : 1357 * 3, This is performed three times*/
+
+int mm_app_dtc_5(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j,k;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 5...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for front \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() frontcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ for (k = 0; k < 4 ; k++) {
+ LOGE("DUAL open back camera %d \n",k);
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGE(" stopPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close back camera\n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ sleep(1);
+ }
+ LOGE("DUAL stop camera Preview for Rdi \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGD(" stopRdi() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close front camera \n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 13246857 : 2468 * 3, This is performed three times*/
+
+int mm_app_dtc_6(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j,k;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 6...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ for (k = 0; k < 4 ; k++) {
+ LOGE("DUAL open front camera %d \n",k);
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL stop camera Preview for front \n");
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close front camera\n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ sleep(1);
+ }
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close back camera \n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/*Multi Threaded Test Cases*/
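+/** front_thread
+ *  @data: pointer to struct test_case_params
+ *
+ *  Worker for the concurrent dual-camera tests: repeatedly opens
+ *  the front camera, starts RDI streaming, optionally takes RDI
+ *  snapshots, then stops RDI and closes, as driven by the
+ *  launch/preview/snapshot counts in test_case_params.
+ **/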
+static void *front_thread(void *data)
+{
+ int front_camera = 1;
+ int rc = MM_CAMERA_OK;
+ int i,j,k,m;
+ struct test_case_params params
+ = *((struct test_case_params *)data);
+ for (i = 0; i < params.launch; i++) {
+ LOGE("DUAL open front camera %d\n",i);
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ for (j = 0; j < params.preview; j++) {
+ LOGE("DUAL start camera Rdi for front %d ,%d \n",i,j);
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(20*1000);
+ for (k = 0; k < params.snapshot; k++) {
+ LOGE("DUAL take picture for front %d,%d,%d \n",i,j,k);
+ if ( MM_CAMERA_OK != (rc = takePicture_rdi(front_camera))) {
+ LOGE(" TakePicture() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(30*1000);
+ }
+ LOGE("DUAL stop camera Rdi for front %d,%d\n",i,j);
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ }
+
+ LOGE("DUAL close front camera %d\n",i);
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ }
+end:
+ LOGE("DUAL front thread close %d",rc);
+ return NULL;
+}
+
+static void *back_thread(void *data)
+{
+ int rc = MM_CAMERA_OK;
+ int back_camera = 0;
+ int i,j,k,m;
+ struct test_case_params params
+ = *((struct test_case_params *)data);
+ for (i = 0; i < params.launch; i++) {
+ LOGE("DUAL open back camera %d\n",i);
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ for (j = 0; j < params.preview; j++) {
+ LOGE("DUAL start camera Preview for back %d, %d\n",i,j);
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(20*1000);
+ for (k = 0; k < params.snapshot; k++) {
+ LOGE("DUAL take picture for back %d, %d, %d\n",i,j,k);
+ if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+ LOGE(" TakePicture() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(30*1000);
+ }
+
+ for (m = 0; m < params.recording; m++) {
+ LOGE("DUAL start record for back %d, %d, %d\n",i,j,m);
+ if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+ LOGE(" StartVideorecording() err=%d\n", rc);
+ break;
+ }
+
+ mm_camera_app_wait();
+ usleep(10*1000);
+ LOGE("DUAL stop camera record for back \n");
+ if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+ LOGE(" Stopvideorecording() err=%d\n", rc);
+ break;
+ }
+ usleep(10*1000);
+ }
+ LOGE("DUAL stop camera Preview for back %d, %d\n",i,j);
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ }
+
+ LOGE("DUAL close back camera %d\n",i);
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ }
+end:
+ LOGE("DUAL back thread close %d",rc);
+ return NULL;
+}
+
+/* Test case m13572468 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_7(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params params;
+ memset(&params, 0, sizeof(struct test_case_params));
+ params.launch = 5;
+ params.preview = 5;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 7...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &params);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &params);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case m139572468 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_8(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params bparams, fparams;
+ memset(&bparams, 0, sizeof(struct test_case_params));
+ memset(&fparams, 0, sizeof(struct test_case_params));
+ bparams.launch = 5;
+ bparams.preview = 5;
+ bparams.snapshot= 5;
+ fparams.launch = 5;
+ fparams.preview = 5;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 8...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0)
+ printf("\nPassed\n");
+ else
+ printf("\nFailed\n");
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case m1395724c68 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_9(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params bparams, fparams;
+ memset(&bparams, 0, sizeof(struct test_case_params));
+ memset(&fparams, 0, sizeof(struct test_case_params));
+ bparams.launch = 5;
+ bparams.preview = 5;
+ bparams.snapshot= 5;
+ fparams.launch = 5;
+ fparams.preview = 5;
+ fparams.snapshot = 5;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 9...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case m13ab572468 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_10(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params bparams, fparams;
+ memset(&bparams, 0, sizeof(struct test_case_params));
+ memset(&fparams, 0, sizeof(struct test_case_params));
+ bparams.launch = 5;
+ bparams.preview = 5;
+ bparams.recording= 5;
+ fparams.launch = 5;
+ fparams.preview = 5;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 10...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case m13ab5724c68 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_11(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params bparams, fparams;
+ memset(&bparams, 0, sizeof(struct test_case_params));
+ memset(&fparams, 0, sizeof(struct test_case_params));
+ bparams.launch = 5;
+ bparams.preview = 5;
+ bparams.recording= 5;
+ fparams.launch = 5;
+ fparams.preview = 5;
+ fparams.snapshot = 5;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 11...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case m1728 : Open & start in 2 concurrent pthread*/
+int mm_app_dtc_12(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int result = 0;
+
+ pthread_t back_thread_id, front_thread_id;
+ struct test_case_params bparams, fparams;
+ memset(&bparams, 0, sizeof(struct test_case_params));
+ memset(&fparams, 0, sizeof(struct test_case_params));
+ bparams.launch = 15;
+ fparams.launch = 15;
+ printf("\n Verifying Preview on back Camera and RDI on Front camera 12...\n");
+
+ LOGE("start back DUAL ");
+ rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+ LOGE("start front DUAL ");
+ rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+ sleep(1);
+ LOGE("stop back DUAL ");
+ rc = pthread_join(back_thread_id, NULL);
+ LOGE("stop front DUAL ");
+ rc = pthread_join(front_thread_id, NULL);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* Test case 2413(ab)5768
+ * Test the dual camera usecase. We startPreview on front camera,
+ * but backend will allocate RDI buffers and start front camera in
+ * RDI streaming mode. It then diverts RDI frames, converts them into YUV 420
+ * through C2D and generate preview data in the buffers allocated here.
+ * Back camera will use the pixel interface as usual.
+ */
+
+int mm_app_dtc_13(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j,k;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n 13. Verifying Preview + Recording on back Camera and Preview(through RDI) on Front camera\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for front \n");
+ if( MM_CAMERA_OK != (rc = startPreview(front_camera))) {
+ LOGE(" front camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ usleep(20*1000);
+
+ for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+ LOGE("DUAL open back camera %d \n",k);
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ usleep(30*1000);
+
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ LOGE("DUAL start camera record for back Iteration %d \n", j);
+ if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+ LOGE(" StartVideorecording() err=%d\n", rc);
+ break;
+ }
+
+ mm_camera_app_wait();
+ usleep(10*1000*1000);
+ LOGE("DUAL stop camera record for back Iteration %d\n", j);
+ if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+ LOGE(" Stopvideorecording() err=%d\n", rc);
+ break;
+ }
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL stop camera Preview for back \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+ LOGE(" stopPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+
+ LOGE("DUAL close back camera\n");
+ if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ usleep(20*1000);
+ }
+ LOGE("DUAL stop camera Preview for Rdi \n");
+ if( MM_CAMERA_OK != (rc = stopPreview(front_camera))) {
+ LOGE(" stopPreview() frontcamera err=%d\n", rc);
+ goto end;
+ }
+ usleep(10*1000);
+ LOGE("DUAL close front camera \n");
+ if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/*Below 6 are reference test cases just to test the open path for dual camera*/
+int mm_app_dtc_1243(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera Preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_2134(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera Preview for front \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera Rdi for back \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+int mm_app_dtc_2143(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_2413(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera rdi for front \n");
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera preview for back \n");
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_1234(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL open back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL open front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ LOGE("DUAL start camera preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+
+ LOGE("DUAL start camera rdi for front \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_1324(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+ LOGE("DUAL start back camera \n");
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL start camera preview for back \n");
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+ //mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL start front camera \n");
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ LOGE("DUAL start rdi preview \n");
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+ sleep(1);
+ LOGE("DUAL end \n");
+
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/* single camera test cases*/
+int mm_app_dtc_s_0(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+
+ mm_camera_app_wait();
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() front camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ LOGE(" startPreview() backcamera err=%d\n", rc);
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_s_1(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Snapshot on front and back camera...\n");
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ LOGE(" startPreview() err=%d\n", rc);
+ break;
+ }
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = takePicture_yuv(my_cam_app.cam_open))) {
+ LOGE(" TakePicture() err=%d\n", rc);
+ break;
+ }
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ LOGE(" Snapshot/Preview Callback not received in time or qbuf Faile\n");
+ break;
+ }*/
+ mm_camera_app_wait();
+ result++;
+ }
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ break;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf(" Snapshot Start/Stop Fails for Camera %d in %d iteration", i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\t***Passed***\n");
+ }else{
+ printf("\t***Failed***\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_s_2(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Video on front and back camera...\n");
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ LOGE(" startPreview() err=%d\n", rc);
+ break;
+ }
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = startRecording(my_cam_app.cam_open))) {
+ LOGE(" StartVideorecording() err=%d\n", rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ LOGE(" Video Callback not received in time\n");
+ break;
+ }*/
+ mm_camera_app_wait();
+ if( MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))) {
+ LOGE(" Stopvideorecording() err=%d\n", rc);
+ break;
+ }
+ result++;
+ }
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ break;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf(" Video Start/Stop Fails for Camera %d in %d iteration", i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_dtc_s_3(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying RDI Stream on front and back camera...\n");
+ if(cam_apps->num_cameras == 0) {
+ LOGE("Query Failed: Num of cameras = %d\n", cam_apps->num_cameras);
+ rc = -1;
+ goto end;
+ }
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = startRdi(my_cam_app.cam_open))) {
+ LOGE(" StartVideorecording() err=%d\n", rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ LOGE(" Video Callback not received in time\n");
+ break;
+ }*/
+ mm_camera_app_wait();
+ if( MM_CAMERA_OK != (rc = stopRdi(my_cam_app.cam_open))) {
+ LOGE(" Stopvideorecording() err=%d\n", rc);
+ break;
+ }
+ result++;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf(" Video Start/Stop Fails for Camera %d in %d iteration", i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+/*Stats Test Case*/
+int mm_app_dtc_s_5(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ LOGE("mm_app_open() back camera err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ LOGE("system_dimension_set() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startStats(back_camera))) {
+ LOGE(" back camera startPreview() err=%d\n", rc);
+ goto end;
+ }
+
+ mm_camera_app_wait();
+
+ if( MM_CAMERA_OK != (rc = stopStats(my_cam_app.cam_open))) {
+ LOGD(" startPreview() err=%d\n", rc);
+ goto end;
+ }
+
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ LOGE("mm_app_close() err=%d\n", rc);
+ rc = -1;
+ goto end;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
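+/*===========================================================================
+ * FUNCTION - mm_app_gen_dual_test_cases -
+ *
+ * DESCRIPTION: Registers the dual-camera test functions (mm_app_dtc_0 to
+ * mm_app_dtc_13) in the global mm_app_tc table and returns the
+ * number of test cases added.
+ *==========================================================================*/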
+int mm_app_gen_dual_test_cases()
+{
+ int tc = 0;
+ memset(mm_app_tc, 0, sizeof(mm_app_tc));
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_0;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_1;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_2;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_3;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_4;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_5;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_6;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_7;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_8;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_9;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_10;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_11;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_12;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_13;
+
+ return tc;
+}
+
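+/*===========================================================================
+ * FUNCTION - mm_app_dual_test_entry -
+ *
+ * DESCRIPTION: Runs every registered dual-camera test case in order and
+ * aborts at the first failure; returns the result of the last
+ * test case executed.
+ *==========================================================================*/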
+int mm_app_dual_test_entry(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, tc = 0;
+ int cam_id = 0;
+
+ tc = mm_app_gen_dual_test_cases();
+ LOGD("Running %d test cases\n",tc);
+ for(i = 0; i < tc; i++) {
+ mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+ if(mm_app_tc[i].r != MM_CAMERA_OK) {
+ printf(" test case %d error = %d, abort unit testing engine!!!!\n",
+ i, mm_app_tc[i].r);
+ rc = mm_app_tc[i].r;
+ goto end;
+ }
+ }
+end:
+ printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+ return rc;
+}
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
new file mode 100644
index 0000000..0865c6f
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
@@ -0,0 +1,2047 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <ctype.h>
+#include <errno.h>
+
+// Camera dependencies
+#include "mm_qcamera_main_menu.h"
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+#define MIN(X,Y) ((X) < (Y) ? (X) : (Y))
+#define VIDEO_BUFFER_SIZE (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2)
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define SNAPSHOT_BUFFER_SIZE (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+//TODO: check these macros against the current app.
+
+/*===========================================================================
+ * Defines
+ *===========================================================================*/
+//#define VIDEO_FRAMES_NUM 4
+#define THUMBNAIL_FRAMES_NUM 1
+#define SNAPSHOT_FRAMES_NUM 1
+#define MAX_NUM_FORMAT 32
+#define ZOOM_STEP 2
+#define ZOOM_MIN_VALUE 0
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+
+//TODO: find correct values of Contrast defines.
+#define CAMERA_MIN_CONTRAST 0
+#define CAMERA_DEF_CONTRAST 5
+#define CAMERA_MAX_CONTRAST 10
+#define CAMERA_CONTRAST_STEP 1
+
+//TODO: find correct values of Brightness defines.
+#define CAMERA_MIN_BRIGHTNESS 0
+#define CAMERA_DEF_BRIGHTNESS 3
+#define CAMERA_MAX_BRIGHTNESS 6
+#define CAMERA_BRIGHTNESS_STEP 1
+
+//TODO: find correct values of Saturation defines.
+#define CAMERA_MIN_SATURATION 0
+#define CAMERA_DEF_SATURATION 5
+#define CAMERA_MAX_SATURATION 10
+#define CAMERA_SATURATION_STEP 1
+
+#define CAMERA_MIN_SHARPNESS 0
+#define CAMERA_MAX_SHARPNESS 10
+#define CAMERA_DEF_SHARPNESS 5
+#define CAMERA_SHARPNESS_STEP 1
+
+const CAMERA_MAIN_MENU_TBL_T camera_main_menu_tbl[] = {
+ {START_PREVIEW, "Start preview"},
+ {STOP_PREVIEW, "Stop preview/video"},
+ {SET_WHITE_BALANCE, "Set white balance mode"},
+ {SET_TINTLESS_ENABLE, "Set Tintless Enable"},
+ {SET_TINTLESS_DISABLE, "Set Tintless Disable"},
+ {SET_EXP_METERING, "Set exposure metering mode"},
+ {GET_CTRL_VALUE, "Get control value menu"},
+ {TOGGLE_AFR, "Toggle auto frame rate. Default fixed frame rate"},
+ {SET_ISO, "ISO changes."},
+ {BRIGHTNESS_GOTO_SUBMENU, "Brightness changes."},
+ {CONTRAST_GOTO_SUBMENU, "Contrast changes."},
+ {EV_GOTO_SUBMENU, "EV changes."},
+ {SATURATION_GOTO_SUBMENU, "Saturation changes."},
+ {SET_ZOOM, "Set Digital Zoom."},
+ {SET_SHARPNESS, "Set Sharpness."},
+ {TAKE_JPEG_SNAPSHOT, "Take a snapshot"},
+ {START_RECORDING, "Start RECORDING"},
+ {STOP_RECORDING, "Stop RECORDING"},
+ {BEST_SHOT, "Set best-shot mode"},
+ {LIVE_SHOT, "Take a live snapshot"},
+ {FLASH_MODES, "Set Flash modes"},
+ {TOGGLE_ZSL, "Toggle ZSL On/Off"},
+ {TAKE_RAW_SNAPSHOT, "Take RAW snapshot"},
+ {SWITCH_SNAP_RESOLUTION, "Select Jpeg resolution"},
+ {TOGGLE_WNR, "Toggle Wavelet Denoise"},
+ {EXIT, "Exit"}
+};
+
+CAMERA_SENSOR_MENU_TLB_T sensor_tbl[] = {
+ {"Primary Camera", 0},
+ {"Secondary Camera", 0},
+ {"Camera Sensor 3", 0},
+ {"Camera Sensor 4", 0}
+};
+
+const CAMERA_BRIGHTNESS_TBL_T brightness_change_tbl[] = {
+ {INC_BRIGHTNESS, "Increase Brightness by one step."},
+ {DEC_BRIGHTNESS, "Decrease Brightness by one step."},
+};
+
+const CAMERA_CONTRST_TBL_T contrast_change_tbl[] = {
+ {INC_CONTRAST, "Increase Contrast by one step."},
+ {DEC_CONTRAST, "Decrease Contrast by one step."},
+};
+
+const CAMERA_EV_TBL_T camera_EV_tbl[] = {
+ {INCREASE_EV, "Increase EV by one step."},
+ {DECREASE_EV, "Decrease EV by one step."},
+};
+
+const CAMERA_SATURATION_TBL_T camera_saturation_tbl[] = {
+ {INC_SATURATION, "Increase Satuation by one step."},
+ {DEC_SATURATION, "Decrease Satuation by one step."},
+};
+
+const CAMERA_SHARPNESS_TBL_T camera_sharpness_tbl[] = {
+ {INC_SHARPNESS, "Increase Sharpness."},
+ {DEC_SHARPNESS, "Decrease Sharpness."},
+};
+
+const WHITE_BALANCE_TBL_T white_balance_tbl[] = {
+ { WB_AUTO, "White Balance - Auto"},
+ { WB_INCANDESCENT, "White Balance - Incandescent"},
+ { WB_FLUORESCENT, "White Balance - Fluorescent"},
+ { WB_WARM_FLUORESCENT, "White Balance - Warm Fluorescent"},
+ { WB_DAYLIGHT, "White Balance - Daylight"},
+ { WB_CLOUDY_DAYLIGHT, "White Balance - Cloudy Daylight"},
+ { WB_TWILIGHT, "White Balance - Twilight"},
+ { WB_SHADE, "White Balance - Shade"},
+};
+
+const GET_CTRL_TBL_T get_ctrl_tbl[] = {
+ { WHITE_BALANCE_STATE, "Get white balance state (auto/off)"},
+ { WHITE_BALANCE_TEMPERATURE, "Get white balance temperature"},
+ { BRIGHTNESS_CTRL, "Get brightness value"},
+ { EV, "Get exposure value"},
+ { CONTRAST_CTRL, "Get contrast value"},
+ { SATURATION_CTRL, "Get saturation value"},
+ { SHARPNESS_CTRL, "Get sharpness value"},
+};
+
+const EXP_METERING_TBL_T exp_metering_tbl[] = {
+ { AUTO_EXP_FRAME_AVG, "Exposure Metering - Frame Average"},
+ { AUTO_EXP_CENTER_WEIGHTED, "Exposure Metering - Center Weighted"},
+ { AUTO_EXP_SPOT_METERING, "Exposure Metering - Spot Metering"},
+ { AUTO_EXP_SMART_METERING, "Exposure Metering - Smart Metering"},
+ { AUTO_EXP_USER_METERING, "Exposure Metering - User Metering"},
+ { AUTO_EXP_SPOT_METERING_ADV, "Exposure Metering - Spot Metering Adv"},
+ { AUTO_EXP_CENTER_WEIGHTED_ADV,"Exposure Metering - Center Weighted Adv"},
+};
+
+const ISO_TBL_T iso_tbl[] = {
+ { ISO_AUTO, "ISO: Auto"},
+ { ISO_DEBLUR, "ISO: Deblur"},
+ { ISO_100, "ISO: 100"},
+ { ISO_200, "ISO: 200"},
+ { ISO_400, "ISO: 400"},
+ { ISO_800, "ISO: 800"},
+ { ISO_1600, "ISO: 1600"},
+};
+
+const ZOOM_TBL_T zoom_tbl[] = {
+ { ZOOM_IN, "Zoom In one step"},
+ { ZOOM_OUT, "Zoom Out one step"},
+};
+
+const BESTSHOT_MODE_TBT_T bestshot_mode_tbl[] = {
+ {BESTSHOT_AUTO, "Bestshot Mode: Auto"},
+ {BESTSHOT_ACTION, "Bestshot Mode: Action"},
+ {BESTSHOT_PORTRAIT, "Bestshot Mode: Portrait"},
+ {BESTSHOT_LANDSCAPE, "Bestshot Mode: Landscape"},
+ {BESTSHOT_NIGHT, "Bestshot Mode: Night"},
+ {BESTSHOT_NIGHT_PORTRAIT, "Bestshot Mode: Night Portrait"},
+ {BESTSHOT_THEATRE, "Bestshot Mode: Theatre"},
+ {BESTSHOT_BEACH, "Bestshot Mode: Beach"},
+ {BESTSHOT_SNOW, "Bestshot Mode: Snow"},
+ {BESTSHOT_SUNSET, "Bestshot Mode: Sunset"},
+ {BESTSHOT_ANTISHAKE, "Bestshot Mode: Antishake"},
+ {BESTSHOT_FIREWORKS, "Bestshot Mode: Fireworks"},
+ {BESTSHOT_SPORTS, "Bestshot Mode: Sports"},
+ {BESTSHOT_PARTY, "Bestshot Mode: Party"},
+ {BESTSHOT_CANDLELIGHT, "Bestshot Mode: Candlelight"},
+ {BESTSHOT_ASD, "Bestshot Mode: ASD"},
+ {BESTSHOT_BACKLIGHT, "Bestshot Mode: Backlight"},
+ {BESTSHOT_FLOWERS, "Bestshot Mode: Flowers"},
+ {BESTSHOT_AR, "Bestshot Mode: Augmented Reality"},
+ {BESTSHOT_HDR, "Bestshot Mode: HDR"},
+};
+
+const FLASH_MODE_TBL_T flashmodes_tbl[] = {
+ { FLASH_MODE_OFF, "Flash Mode Off"},
+ { FLASH_MODE_AUTO, "Flash Mode Auto"},
+ { FLASH_MODE_ON, "Flash Mode On"},
+ { FLASH_MODE_TORCH, "Flash Mode Torch"},
+};
+
+DIMENSION_TBL_T dimension_tbl[] = {
+{VGA_WIDTH, VGA_HEIGHT, "VGA", "Size: VGA <640x480>" , 0},
+{MP1_WIDTH, MP1_HEIGHT, "1MP", "Size: 1MP <1280x960>" , 0},
+{MP5_WIDTH, MP5_HEIGHT, "5MP", "Size: 5MP <2592x1944>", 0},
+{MP8_WIDTH, MP8_HEIGHT, "8MP", "Size: 8MP <3264x2448>", 0},
+{MP12_WIDTH, MP12_HEIGHT, "12MP", "Size: 12MP <4000x3000>", 0},
+};
+
+/*===========================================================================
+ * Forward declarations
+ *===========================================================================*/
+//static void system_dimension_set(mm_camera_test_obj_t *test_obj);
+/*===========================================================================
+ * Static global variables
+ *===========================================================================*/
+USER_INPUT_DISPLAY_T input_display;
+int preview_video_resolution_flag = 0;
+
+//TODO: default values.
+#if 1
+int brightness = CAMERA_DEF_BRIGHTNESS;
+int contrast = CAMERA_DEF_CONTRAST;
+int saturation = CAMERA_DEF_SATURATION;
+int sharpness = CAMERA_DEF_SHARPNESS;
+#else
+int brightness = 0;
+int contrast = 0;
+int saturation = 0;
+int sharpness = 0;
+#endif
+//TODO: find new method to calculate ev.
+//int32_t ev_numerator = EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR;
+
+//TODO:
+//fps_mode_t fps_mode = FPS_MODE_FIXED;
+int zoom_level;
+int zoom_max_value;
+int cam_id;
+int is_rec = 0;
+
+
+static int submain();
+
+/*===========================================================================
+ * FUNCTION - keypress_to_event -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int keypress_to_event(char keypress)
+{
+ int out_buf = INVALID_KEY_PRESS;
+ if ((keypress >= 'A' && keypress <= 'Z') ||
+ (keypress >= 'a' && keypress <= 'z')) {
+ out_buf = tolower(keypress);
+ out_buf = out_buf - 'a';
+ } else if (keypress >= '0' && keypress <= '9') {
+ out_buf = keypress - '0';
+ }
+ return out_buf;
+}
+
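+/*===========================================================================
+ * FUNCTION - next_menu -
+ *
+ * DESCRIPTION: Translates a keypress within the current menu into a camera
+ * action (plus optional parameter) and returns the id of the
+ * menu to display next.
+ *==========================================================================*/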
+int next_menu(menu_id_change_t current_menu_id, char keypress, camera_action_t * action_id_ptr, int * action_param)
+{
+ int output_to_event;
+ menu_id_change_t next_menu_id = MENU_ID_INVALID;
+ * action_id_ptr = ACTION_NO_ACTION;
+
+ output_to_event = keypress_to_event(keypress);
+ LOGD("current_menu_id=%d\n",current_menu_id);
+ printf("output_to_event=%d\n",output_to_event);
+ switch(current_menu_id) {
+ case MENU_ID_MAIN:
+ switch(output_to_event) {
+ case START_PREVIEW:
+ * action_id_ptr = ACTION_START_PREVIEW;
+ LOGD("START_PREVIEW\n");
+ break;
+ case STOP_PREVIEW:
+ * action_id_ptr = ACTION_STOP_PREVIEW;
+ LOGD("STOP_PREVIEW\n");
+ break;
+
+ case SET_WHITE_BALANCE:
+ next_menu_id = MENU_ID_WHITEBALANCECHANGE;
+ LOGD("next_menu_id = MENU_ID_WHITEBALANCECHANGE = %d\n", next_menu_id);
+ break;
+
+ case SET_TINTLESS_ENABLE:
+ * action_id_ptr = ACTION_SET_TINTLESS_ENABLE;
+ next_menu_id = MENU_ID_MAIN;
+ LOGD("next_menu_id = MENU_ID_TINTLESSENABLE = %d\n", next_menu_id);
+ break;
+
+ case SET_TINTLESS_DISABLE:
+ * action_id_ptr = ACTION_SET_TINTLESS_DISABLE;
+ next_menu_id = MENU_ID_MAIN;
+ LOGD("next_menu_id = MENU_ID_TINTLESSDISABLE = %d\n", next_menu_id);
+ break;
+
+ case SET_EXP_METERING:
+ next_menu_id = MENU_ID_EXPMETERINGCHANGE;
+ LOGD("next_menu_id = MENU_ID_EXPMETERINGCHANGE = %d\n", next_menu_id);
+ break;
+
+ case GET_CTRL_VALUE:
+ next_menu_id = MENU_ID_GET_CTRL_VALUE;
+ LOGD("next_menu_id = MENU_ID_GET_CTRL_VALUE = %d\n", next_menu_id);
+ break;
+
+ case BRIGHTNESS_GOTO_SUBMENU:
+ next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+ LOGD("next_menu_id = MENU_ID_BRIGHTNESSCHANGE = %d\n", next_menu_id);
+ break;
+
+ case CONTRAST_GOTO_SUBMENU:
+ next_menu_id = MENU_ID_CONTRASTCHANGE;
+ break;
+
+ case EV_GOTO_SUBMENU:
+ next_menu_id = MENU_ID_EVCHANGE;
+ break;
+
+ case SATURATION_GOTO_SUBMENU:
+ next_menu_id = MENU_ID_SATURATIONCHANGE;
+ break;
+
+ case TOGGLE_AFR:
+ * action_id_ptr = ACTION_TOGGLE_AFR;
+ LOGD("next_menu_id = MENU_ID_TOGGLEAFR = %d\n", next_menu_id);
+ break;
+
+ case SET_ISO:
+ next_menu_id = MENU_ID_ISOCHANGE;
+ LOGD("next_menu_id = MENU_ID_ISOCHANGE = %d\n", next_menu_id);
+ break;
+
+ case SET_ZOOM:
+ next_menu_id = MENU_ID_ZOOMCHANGE;
+ LOGD("next_menu_id = MENU_ID_ZOOMCHANGE = %d\n", next_menu_id);
+ break;
+
+ case BEST_SHOT:
+ next_menu_id = MENU_ID_BESTSHOT;
+ LOGD("next_menu_id = MENU_ID_BESTSHOT = %d\n", next_menu_id);
+ break;
+
+ case LIVE_SHOT:
+ * action_id_ptr = ACTION_TAKE_LIVE_SNAPSHOT;
+ LOGD("\nTaking Live snapshot\n");
+ break;
+
+ case FLASH_MODES:
+ next_menu_id = MENU_ID_FLASHMODE;
+ LOGD("next_menu_id = MENU_ID_FLASHMODE = %d\n", next_menu_id);
+ break;
+
+ case SET_SHARPNESS:
+ next_menu_id = MENU_ID_SHARPNESSCHANGE;
+ LOGD("next_menu_id = MENU_ID_SHARPNESSCHANGE = %d\n", next_menu_id);
+ break;
+
+ case SWITCH_SNAP_RESOLUTION:
+ next_menu_id = MENU_ID_SWITCH_RES;
+ LOGD("next_menu_id = MENU_ID_SWITCH_RES = %d\n", next_menu_id);
+ break;
+
+ case TAKE_JPEG_SNAPSHOT:
+ * action_id_ptr = ACTION_TAKE_JPEG_SNAPSHOT;
+ printf("\n Taking JPEG snapshot\n");
+ break;
+
+ case START_RECORDING:
+ * action_id_ptr = ACTION_START_RECORDING;
+ LOGD("Start recording\n");
+ break;
+ case STOP_RECORDING:
+ * action_id_ptr = ACTION_STOP_RECORDING;
+ LOGD("Stop recording\n");
+ break;
+ case TOGGLE_ZSL:
+ * action_id_ptr = ACTION_TOGGLE_ZSL;
+ LOGD("Toggle ZSL\n");
+ break;
+ case TAKE_RAW_SNAPSHOT:
+ * action_id_ptr = ACTION_TAKE_RAW_SNAPSHOT;
+ next_menu_id = MENU_ID_MAIN;
+ LOGD("Capture RAW\n");
+ break;
+ case TOGGLE_WNR:
+ * action_id_ptr = ACTION_TOGGLE_WNR;
+ next_menu_id = MENU_ID_MAIN;
+ LOGD("Toggle WNR");
+ break;
+ case EXIT:
+ * action_id_ptr = ACTION_EXIT;
+ LOGD("Exit \n");
+ break;
+ default:
+ next_menu_id = MENU_ID_MAIN;
+ LOGD("next_menu_id = MENU_ID_MAIN = %d\n", next_menu_id);
+ break;
+ }
+ break;
+
+ case MENU_ID_SWITCH_RES:
+ printf("MENU_ID_SWITCH_RES\n");
+ *action_id_ptr = ACTION_SWITCH_RESOLUTION;
+ *action_param = output_to_event;
+ int available_sizes = sizeof(dimension_tbl)/sizeof(dimension_tbl[0]);
+ if ( ( *action_param >= 0 ) &&
+ ( *action_param < available_sizes ) &&
+ ( dimension_tbl[*action_param].supported )) {
+ next_menu_id = MENU_ID_MAIN;
+ }
+ else {
+ next_menu_id = current_menu_id;
+ }
+ break;
+
+ case MENU_ID_SENSORS:
+ next_menu_id = MENU_ID_MAIN;
+ *action_id_ptr = ACTION_SWITCH_CAMERA;
+ *action_param = output_to_event;
+ break;
+
+ case MENU_ID_WHITEBALANCECHANGE:
+ printf("MENU_ID_WHITEBALANCECHANGE\n");
+ if (output_to_event >= WB_MAX) {
+ next_menu_id = current_menu_id;
+ * action_id_ptr = ACTION_NO_ACTION;
+ } else {
+ next_menu_id = MENU_ID_MAIN;
+ * action_id_ptr = ACTION_SET_WHITE_BALANCE;
+ * action_param = output_to_event;
+ }
+ break;
+
+ case MENU_ID_EXPMETERINGCHANGE:
+ printf("MENU_ID_EXPMETERINGCHANGE\n");
+ if (output_to_event >= AUTO_EXP_MAX) {
+ next_menu_id = current_menu_id;
+ * action_id_ptr = ACTION_NO_ACTION;
+ } else {
+ next_menu_id = MENU_ID_MAIN;
+ * action_id_ptr = ACTION_SET_EXP_METERING;
+ * action_param = output_to_event;
+ }
+ break;
+
+ case MENU_ID_GET_CTRL_VALUE:
+ printf("MENU_ID_GET_CTRL_VALUE\n");
+ * action_id_ptr = ACTION_GET_CTRL_VALUE;
+ if (output_to_event > 0 &&
+ output_to_event <= (int)(sizeof(get_ctrl_tbl)/sizeof(get_ctrl_tbl[0]))) {
+ next_menu_id = MENU_ID_MAIN;
+ * action_param = output_to_event;
+ }
+ else {
+ next_menu_id = current_menu_id;
+ }
+ break;
+
+ case MENU_ID_BRIGHTNESSCHANGE:
+ switch (output_to_event) {
+ case INC_BRIGHTNESS:
+ * action_id_ptr = ACTION_BRIGHTNESS_INCREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ case DEC_BRIGHTNESS:
+ * action_id_ptr = ACTION_BRIGHTNESS_DECREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ default:
+ next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+ break;
+ }
+ break;
+
+ case MENU_ID_CONTRASTCHANGE:
+ switch (output_to_event) {
+ case INC_CONTRAST:
+ * action_id_ptr = ACTION_CONTRAST_INCREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ case DEC_CONTRAST:
+ * action_id_ptr = ACTION_CONTRAST_DECREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ default:
+ next_menu_id = MENU_ID_CONTRASTCHANGE;
+ break;
+ }
+ break;
+
+ case MENU_ID_EVCHANGE:
+ switch (output_to_event) {
+ case INCREASE_EV:
+ * action_id_ptr = ACTION_EV_INCREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ case DECREASE_EV:
+ * action_id_ptr = ACTION_EV_DECREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ default:
+ next_menu_id = MENU_ID_EVCHANGE;
+ break;
+ }
+ break;
+
+ case MENU_ID_SATURATIONCHANGE:
+ switch (output_to_event) {
+ case INC_SATURATION:
+ * action_id_ptr = ACTION_SATURATION_INCREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ case DEC_SATURATION:
+ * action_id_ptr = ACTION_SATURATION_DECREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+
+ default:
+ next_menu_id = MENU_ID_SATURATIONCHANGE;
+ break;
+ }
+ break;
+
+ case MENU_ID_ISOCHANGE:
+ printf("MENU_ID_ISOCHANGE\n");
+ if (output_to_event >= ISO_MAX) {
+ next_menu_id = current_menu_id;
+ * action_id_ptr = ACTION_NO_ACTION;
+ } else {
+ next_menu_id = MENU_ID_MAIN;
+ * action_id_ptr = ACTION_SET_ISO;
+ * action_param = output_to_event;
+ }
+ break;
+
+ case MENU_ID_ZOOMCHANGE:
+ * action_id_ptr = ACTION_SET_ZOOM;
+ if (output_to_event > 0 &&
+ output_to_event <= (int)(sizeof(zoom_tbl)/sizeof(zoom_tbl[0]))) {
+ next_menu_id = MENU_ID_MAIN;
+ * action_param = output_to_event;
+ } else {
+ next_menu_id = current_menu_id;
+ }
+ break;
+
+ case MENU_ID_SHARPNESSCHANGE:
+ switch (output_to_event) {
+ case INC_SHARPNESS:
+ * action_id_ptr = ACTION_SHARPNESS_INCREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+ case DEC_SHARPNESS:
+ * action_id_ptr = ACTION_SHARPNESS_DECREASE;
+ next_menu_id = MENU_ID_MAIN;
+ break;
+ default:
+ next_menu_id = MENU_ID_SHARPNESSCHANGE;
+ break;
+ }
+ break;
+
+ case MENU_ID_BESTSHOT:
+ if (output_to_event >= BESTSHOT_MAX) {
+ next_menu_id = current_menu_id;
+ * action_id_ptr = ACTION_NO_ACTION;
+ } else {
+ next_menu_id = MENU_ID_MAIN;
+ * action_id_ptr = ACTION_SET_BESTSHOT_MODE;
+ * action_param = output_to_event;
+ }
+ break;
+
+ case MENU_ID_FLASHMODE:
+ if (output_to_event >= FLASH_MODE_MAX) {
+ next_menu_id = current_menu_id;
+ * action_id_ptr = ACTION_NO_ACTION;
+ } else {
+ next_menu_id = MENU_ID_MAIN;
+ * action_id_ptr = ACTION_SET_FLASH_MODE;
+ * action_param = output_to_event;
+ }
+ break;
+
+ default:
+ LOGD("menu id is wrong: %d\n", current_menu_id);
+ break;
+ }
+
+ return next_menu_id;
+}
+
+/*===========================================================================
+ * FUNCTION - print_menu_preview_video -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+static void print_menu_preview_video(void) {
+ unsigned int i;
+ if (!is_rec) {
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in preview/video mode now \n");
+ printf("===========================================\n\n");
+ } else {
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in RECORDING mode now \n");
+ printf(" Press 'Q' To Stop Recording \n");
+ printf(" Press 'S' To Take Live Snapshot \n");
+ printf("===========================================\n\n");
+ }
+ char menuNum = 'A';
+ for (i = 0; i < sizeof(camera_main_menu_tbl)/sizeof(camera_main_menu_tbl[0]); i++) {
+ if (i == BASE_OFFSET) {
+ menuNum = '1';
+ }
+
+ printf("%c. %s\n", menuNum, camera_main_menu_tbl[i].menu_name);
+ menuNum++;
+ }
+
+ printf("\nPlease enter your choice: ");
+
+ return;
+}
+
+static void camera_preview_video_wb_change_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in white balance change mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(white_balance_tbl) /
+ sizeof(white_balance_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, white_balance_tbl[i].wb_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for White Balance modes: ");
+ return;
+}
+
+static void camera_preview_video_get_ctrl_value_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in get control value mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(get_ctrl_tbl) /
+ sizeof(get_ctrl_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, get_ctrl_tbl[i].get_ctrl_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for control value you want to get: ");
+ return;
+}
+
+static void camera_preview_video_exp_metering_change_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in exposure metering change mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(exp_metering_tbl) /
+ sizeof(exp_metering_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, exp_metering_tbl[i].exp_metering_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for exposure metering modes: ");
+ return;
+}
+
+static void camera_contrast_change_tbl(void) {
+ unsigned int i;
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change contrast resolution mode \n");
+ printf("==========================================================\n\n");
+
+ char contrastmenuNum = 'A';
+ for (i = 0; i < sizeof(contrast_change_tbl) /
+ sizeof(contrast_change_tbl[0]); i++) {
+ printf("%c. %s\n", contrastmenuNum,
+ contrast_change_tbl[i].contrast_name);
+ contrastmenuNum++;
+ }
+
+ printf("\nPlease enter your choice for contrast Change: ");
+ return;
+}
+
+static void camera_EV_change_tbl(void) {
+ unsigned int i;
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in EV change mode now \n");
+ printf("===========================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0; i < sizeof(camera_EV_tbl)/sizeof(camera_EV_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, camera_EV_tbl[i].EV_name);
+ submenuNum++;
+ }
+
+ printf("\nPlease enter your choice for EV changes: ");
+ return;
+}
+
+static void camera_resolution_change_tbl(void) {
+ unsigned int i;
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in snapshot resolution mode \n");
+ printf("==========================================================\n\n");
+
+ for (i = 0; i < sizeof(dimension_tbl) /
+ sizeof(dimension_tbl[0]); i++) {
+ if ( dimension_tbl[i].supported ) {
+ printf("%d. %s\n", i,
+ dimension_tbl[i].str_name);
+ }
+ }
+
+ printf("\nPlease enter your choice for Resolution: ");
+ return;
+}
+
+static void camera_preview_video_zoom_change_tbl(void) {
+ unsigned int i;
+ zoom_max_value = MAX_ZOOMS_CNT;
+ printf("\nCurrent Zoom Value = %d ,Max Zoom Value = %d\n",zoom_level,zoom_max_value);
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(zoom_tbl) /
+ sizeof(zoom_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, zoom_tbl[i].zoom_direction_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for zoom change direction: ");
+ return;
+}
+
+static void camera_brightness_change_tbl(void) {
+ unsigned int i;
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change brightness mode \n");
+ printf("==========================================================\n\n");
+
+ char brightnessmenuNum = 'A';
+ for (i = 0; i < sizeof(brightness_change_tbl) /
+ sizeof(brightness_change_tbl[0]); i++) {
+ printf("%c. %s\n", brightnessmenuNum,
+ brightness_change_tbl[i].brightness_name);
+ brightnessmenuNum++;
+ }
+
+ printf("\nPlease enter your choice for Brightness Change: ");
+ return;
+}
+
+static void camera_saturation_change_tbl(void) {
+ unsigned int i;
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change saturation mode \n");
+ printf("==========================================================\n\n");
+
+ char saturationmenuNum = 'A';
+ for (i = 0; i < sizeof(camera_saturation_tbl) /
+ sizeof(camera_saturation_tbl[0]); i++) {
+ printf("%c. %s\n", saturationmenuNum,
+ camera_saturation_tbl[i].saturation_name);
+ saturationmenuNum++;
+ }
+
+ printf("\nPlease enter your choice for Saturation Change: ");
+ return;
+}
+
+static void camera_preview_video_iso_change_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in ISO change mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(iso_tbl) /
+ sizeof(iso_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, iso_tbl[i].iso_modes_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for iso modes: ");
+ return;
+}
+
+static void camera_preview_video_sharpness_change_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in sharpness change mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(camera_sharpness_tbl) /
+ sizeof(camera_sharpness_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, camera_sharpness_tbl[i].sharpness_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for sharpness modes: ");
+ return;
+}
+
+static void camera_set_bestshot_tbl(void)
+{
+ unsigned int i;
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in set besthot mode now \n");
+ printf("===========================================\n\n");
+
+
+ char bsmenuNum = 'A';
+ for (i = 0; i < sizeof(bestshot_mode_tbl)/sizeof(bestshot_mode_tbl[0]); i++) {
+ printf("%c. %s\n", bsmenuNum,
+ bestshot_mode_tbl[i].name);
+ bsmenuNum++;
+ }
+
+ printf("\nPlease enter your choice of Bestshot Mode: ");
+ return;
+}
+
+static void camera_set_flashmode_tbl(void)
+{
+ unsigned int i;
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in set flash mode now \n");
+ printf("===========================================\n\n");
+
+
+ char bsmenuNum = 'A';
+ for (i = 0; i < sizeof(flashmodes_tbl)/sizeof(flashmodes_tbl[0]); i++) {
+ printf("%c. %s\n", bsmenuNum,
+ flashmodes_tbl[i].name);
+ bsmenuNum++;
+ }
+
+ printf("\nPlease enter your choice of Bestshot Mode: ");
+ return;
+}
+
+static void camera_sensors_tbl(void)
+{
+ unsigned int i;
+ size_t available_sensors = sizeof(sensor_tbl)/sizeof(sensor_tbl[0]);
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera Sensor to be used: \n");
+ printf("===========================================\n\n");
+
+
+ char bsmenuNum = 'A';
+ for (i = 0; ( i < available_sensors ) && ( sensor_tbl[i].present ) ; i++) {
+ printf("%c. %s\n", bsmenuNum,
+ sensor_tbl[i].menu_name);
+ bsmenuNum++;
+ }
+
+ printf("\nPlease enter your choice for sensor: ");
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_contrast -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int increase_contrast (mm_camera_lib_handle *lib_handle) {
+ contrast += CAMERA_CONTRAST_STEP;
+ if (contrast > CAMERA_MAX_CONTRAST) {
+ contrast = CAMERA_MAX_CONTRAST;
+ printf("Reached max CONTRAST. \n");
+ }
+ printf("Increase Contrast to %d\n", contrast);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_CONTRAST,
+ &contrast,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_contrast -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int decrease_contrast (mm_camera_lib_handle *lib_handle) {
+ contrast -= CAMERA_CONTRAST_STEP;
+ if (contrast < CAMERA_MIN_CONTRAST) {
+ contrast = CAMERA_MIN_CONTRAST;
+ printf("Reached min CONTRAST. \n");
+ }
+ printf("Decrease Contrast to %d\n", contrast);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_CONTRAST,
+ &contrast,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_brightness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int decrease_brightness (mm_camera_lib_handle *lib_handle) {
+ brightness -= CAMERA_BRIGHTNESS_STEP;
+ if (brightness < CAMERA_MIN_BRIGHTNESS) {
+ brightness = CAMERA_MIN_BRIGHTNESS;
+ printf("Reached min BRIGHTNESS. \n");
+ }
+ printf("Decrease Brightness to %d\n", brightness);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_BRIGHTNESS,
+ &brightness,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - increase_brightness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int increase_brightness (mm_camera_lib_handle *lib_handle) {
+ brightness += CAMERA_BRIGHTNESS_STEP;
+ if (brightness > CAMERA_MAX_BRIGHTNESS) {
+ brightness = CAMERA_MAX_BRIGHTNESS;
+ printf("Reached max BRIGHTNESS. \n");
+ }
+ printf("Increase Brightness to %d\n", brightness);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_BRIGHTNESS,
+ &brightness,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - increase_EV -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+
+int increase_EV (void) {
+#if 0
+ int rc = 0;
+ int32_t value = 0;
+ rc = cam_config_is_parm_supported(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+ if(!rc) {
+ printf("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+ return -1;
+ }
+ ev_numerator += 1;
+ if(ev_numerator >= EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR &&
+ ev_numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+ int16_t numerator16 = (int16_t)(ev_numerator & 0x0000ffff);
+ uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+ value = numerator16 << 16 | denominator16;
+ } else {
+ printf("Reached max EV.\n");
+ }
+ return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION, value);
+#endif
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_EV -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int decrease_EV (void) {
+#if 0
+ int rc = 0;
+ int32_t value = 0;
+ rc = cam_config_is_parm_supported(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+ if(!rc) {
+ printf("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+ return -1;
+ }
+ ev_numerator -= 1;
+ if(ev_numerator >= EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR &&
+ ev_numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+ int16_t numerator16 = (int16_t)(ev_numerator & 0x0000ffff);
+ uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+ value = numerator16 << 16 | denominator16;
+ } else {
+ printf("Reached min EV.\n");
+ }
+ return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION, value);
+#endif
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_saturation -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int increase_saturation (mm_camera_lib_handle *lib_handle) {
+#if 0
+ saturation += CAMERA_SATURATION_STEP;
+ if (saturation > CAMERA_MAX_SATURATION) {
+ saturation = CAMERA_MAX_SATURATION;
+ printf("Reached max saturation. \n");
+ }
+ printf("Increase Saturation to %d\n", saturation);
+ return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_SATURATION, saturation);
+#endif
+ saturation += CAMERA_SATURATION_STEP;
+ if (saturation > CAMERA_MAX_SATURATION) {
+ saturation = CAMERA_MAX_SATURATION;
+ printf("Reached max saturation. \n");
+ }
+ printf("Increase saturation to %d\n", contrast);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_SATURATION,
+ &saturation,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_saturation -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int decrease_saturation (mm_camera_lib_handle *lib_handle) {
+#if 0
+ saturation -= CAMERA_SATURATION_STEP;
+ if (saturation < CAMERA_MIN_SATURATION) {
+ saturation = CAMERA_MIN_SATURATION;
+ printf("Reached min saturation. \n");
+ }
+ printf("Dcrease Saturation to %d\n", saturation);
+ return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_SATURATION, saturation);
+#endif
+ saturation -= CAMERA_SATURATION_STEP;
+ if (saturation < CAMERA_MIN_SATURATION) {
+ saturation = CAMERA_MIN_SATURATION;
+ printf("Reached min saturation. \n");
+ }
+ printf("decrease saturation to %d\n", contrast);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_SATURATION,
+ &saturation,
+ NULL);
+}
+
+
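+/*===========================================================================
+ * FUNCTION - take_jpeg_snapshot -
+ *
+ * DESCRIPTION: Triggers a JPEG capture (single or burst) through
+ * mm_app_take_picture() and returns its status.
+ *==========================================================================*/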
+int take_jpeg_snapshot(mm_camera_test_obj_t *test_obj, int is_burst_mode)
+{
+ LOGH("\nEnter take_jpeg_snapshot!!\n");
+ int rc = mm_app_take_picture (test_obj, (uint8_t)is_burst_mode);
+ if (MM_CAMERA_OK != rc) {
+ LOGE(" mm_app_take_picture() err=%d\n", rc);
+ }
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - main -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int main()
+{
+ char tc_buf[3];
+ int mode = 0;
+ int rc = 0;
+
+ printf("Please Select Execution Mode:\n");
+ printf("0: Menu Based 1: Regression\n");
+ fgets(tc_buf, 3, stdin);
+ mode = tc_buf[0] - '0';
+ if(mode == 0) {
+ printf("\nStarting Menu based!!\n");
+ } else if(mode == 1) {
+ printf("Starting Regression testing!!\n");
+ if(!mm_app_start_regression_test(1)) {
+ printf("\nRegressiion test passed!!\n");
+ return 0;
+ } else {
+ printf("\nRegression test failed!!\n");
+ exit(-1);
+ }
+ } else {
+ printf("\nPlease Enter 0 or 1\n");
+ printf("\nExisting the App!!\n");
+ exit(-1);
+ }
+
+
+ rc = submain();
+
+ printf("Exiting application\n");
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - set_whitebalance -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int set_whitebalance (mm_camera_lib_handle *lib_handle, int wb_action_param) {
+ cam_wb_mode_type type = 0;
+ switch (wb_action_param) {
+ case WB_AUTO:
+ printf("\n WB_AUTO\n");
+ type = CAM_WB_MODE_AUTO;
+ break;
+ case WB_INCANDESCENT:
+ printf("\n WB_INCANDESCENT\n");
+ type = CAM_WB_MODE_INCANDESCENT;
+ break;
+ case WB_FLUORESCENT:
+ printf("\n WB_FLUORESCENT\n");
+ type = CAM_WB_MODE_FLUORESCENT;
+ break;
+ case WB_WARM_FLUORESCENT:
+ printf("\n WB_WARM_FLUORESCENT\n");
+ type = CAM_WB_MODE_WARM_FLUORESCENT;
+ break;
+ case WB_DAYLIGHT:
+ printf("\n WB_DAYLIGHT\n");
+ type = CAM_WB_MODE_DAYLIGHT;
+ break;
+ case WB_CLOUDY_DAYLIGHT:
+ printf("\n WB_CLOUDY_DAYLIGHT\n");
+ type = CAM_WB_MODE_CLOUDY_DAYLIGHT;
+ break;
+ case WB_TWILIGHT:
+ printf("\n WB_TWILIGHT\n");
+ type = CAM_WB_MODE_TWILIGHT;
+ break;
+ case WB_SHADE:
+ printf("\n WB_SHADE\n");
+ type = CAM_WB_MODE_SHADE;
+ break;
+ default:
+ break;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_WB,
+ &type,
+ NULL);
+}
+
+
+/*===========================================================================
+ * FUNCTION - set_exp_metering -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int set_exp_metering (mm_camera_lib_handle *lib_handle, int exp_metering_action_param) {
+ cam_auto_exposure_mode_type type = 0;
+ switch (exp_metering_action_param) {
+ case AUTO_EXP_FRAME_AVG:
+ printf("\nAUTO_EXP_FRAME_AVG\n");
+ type = CAM_AEC_MODE_FRAME_AVERAGE;
+ break;
+ case AUTO_EXP_CENTER_WEIGHTED:
+ printf("\n AUTO_EXP_CENTER_WEIGHTED\n");
+ type = CAM_AEC_MODE_CENTER_WEIGHTED;
+ break;
+ case AUTO_EXP_SPOT_METERING:
+ printf("\n AUTO_EXP_SPOT_METERING\n");
+ type = CAM_AEC_MODE_SPOT_METERING;
+ break;
+ case AUTO_EXP_SMART_METERING:
+ printf("\n AUTO_EXP_SMART_METERING\n");
+ type = CAM_AEC_MODE_SMART_METERING;
+ break;
+ case AUTO_EXP_USER_METERING:
+ printf("\n AUTO_EXP_USER_METERING\n");
+ type = CAM_AEC_MODE_USER_METERING;
+ break;
+ case AUTO_EXP_SPOT_METERING_ADV:
+ printf("\n AUTO_EXP_SPOT_METERING_ADV\n");
+ type = CAM_AEC_MODE_SPOT_METERING_ADV;
+ break;
+ case AUTO_EXP_CENTER_WEIGHTED_ADV:
+ printf("\n AUTO_EXP_CENTER_WEIGHTED_ADV\n");
+ type = CAM_AEC_MODE_CENTER_WEIGHTED_ADV;
+ break;
+ default:
+ break;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_EXPOSURE_METERING,
+ &type,
+ NULL);
+}
+
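+/*===========================================================================
+ * FUNCTION - get_ctrl_value -
+ *
+ * DESCRIPTION: Placeholder for querying the selected control value; the
+ * V4L2 query path is compiled out, so the selected mode is
+ * simply echoed back.
+ *==========================================================================*/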
+int get_ctrl_value (int ctrl_value_mode_param){
+#if 0
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ if (ctrl_value_mode_param == WHITE_BALANCE_STATE) {
+ printf("You chose WHITE_BALANCE_STATE\n");
+ ctrl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ }
+ else if (ctrl_value_mode_param == WHITE_BALANCE_TEMPERATURE) {
+ printf("You chose WHITE_BALANCE_TEMPERATURE\n");
+ ctrl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ }
+ else if (ctrl_value_mode_param == BRIGHTNESS_CTRL) {
+ printf("You chose brightness value\n");
+ ctrl.id = V4L2_CID_BRIGHTNESS;
+ }
+ else if (ctrl_value_mode_param == EV) {
+ printf("You chose exposure value\n");
+ ctrl.id = V4L2_CID_EXPOSURE;
+ }
+ else if (ctrl_value_mode_param == CONTRAST_CTRL) {
+ printf("You chose contrast value\n");
+ ctrl.id = V4L2_CID_CONTRAST;
+ }
+ else if (ctrl_value_mode_param == SATURATION_CTRL) {
+ printf("You chose saturation value\n");
+ ctrl.id = V4L2_CID_SATURATION;
+ } else if (ctrl_value_mode_param == SHARPNESS_CTRL) {
+ printf("You chose sharpness value\n");
+ ctrl.id = V4L2_CID_SHARPNESS;
+ }
+
+ // rc = ioctl(camfd, VIDIOC_G_CTRL, &ctrl);
+ return rc;
+#endif
+ return ctrl_value_mode_param;
+}
+
+/*===========================================================================
+ * FUNCTION - toggle_afr -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int toggle_afr () {
+#if 0
+ if (fps_mode == FPS_MODE_AUTO) {
+ printf("\nSetting FPS_MODE_FIXED\n");
+ fps_mode = FPS_MODE_FIXED;
+ } else {
+ printf("\nSetting FPS_MODE_AUTO\n");
+ fps_mode = FPS_MODE_AUTO;
+ }
+ return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_FPS_MODE, fps_mode);
+#endif
+ return 0;
+}
+
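+/*===========================================================================
+ * FUNCTION - set_zoom -
+ *
+ * DESCRIPTION: Steps the zoom level up or down by ZOOM_STEP, clamps it to
+ * [ZOOM_MIN_VALUE, zoom_max_value] and applies it through
+ * MM_CAMERA_LIB_ZOOM.
+ *==========================================================================*/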
+int set_zoom (mm_camera_lib_handle *lib_handle, int zoom_action_param) {
+
+ if (zoom_action_param == ZOOM_IN) {
+ zoom_level += ZOOM_STEP;
+ if (zoom_level > zoom_max_value)
+ zoom_level = zoom_max_value;
+ } else if (zoom_action_param == ZOOM_OUT) {
+ zoom_level -= ZOOM_STEP;
+ if (zoom_level < ZOOM_MIN_VALUE)
+ zoom_level = ZOOM_MIN_VALUE;
+ } else {
+ LOGD(" Invalid zoom_action_param value\n");
+ return -EINVAL;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_ZOOM,
+ &zoom_level,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - set_iso -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int set_iso (mm_camera_lib_handle *lib_handle, int iso_action_param) {
+ cam_iso_mode_type type = 0;
+ switch (iso_action_param) {
+ case ISO_AUTO:
+ printf("\n ISO_AUTO\n");
+ type = CAM_ISO_MODE_AUTO;
+ break;
+ case ISO_DEBLUR:
+ printf("\n ISO_DEBLUR\n");
+ type = CAM_ISO_MODE_DEBLUR;
+ break;
+ case ISO_100:
+ printf("\n ISO_100\n");
+ type = CAM_ISO_MODE_100;
+ break;
+ case ISO_200:
+ printf("\n ISO_200\n");
+ type = CAM_ISO_MODE_200;
+ break;
+ case ISO_400:
+ printf("\n ISO_400\n");
+ type = CAM_ISO_MODE_400;
+ break;
+ case ISO_800:
+ printf("\n ISO_800\n");
+ type = CAM_ISO_MODE_800;
+ break;
+ case ISO_1600:
+ printf("\n ISO_1600\n");
+ type = CAM_ISO_MODE_1600;
+ break;
+ default:
+ break;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_ISO,
+ &type,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - increase_sharpness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int increase_sharpness (mm_camera_lib_handle *lib_handle) {
+ sharpness += CAMERA_SHARPNESS_STEP;
+ if (sharpness > CAMERA_MAX_SHARPNESS) {
+ sharpness = CAMERA_MAX_SHARPNESS;
+ printf("Reached max SHARPNESS. \n");
+ }
+ printf("Increase Sharpness to %d\n", sharpness);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_SHARPNESS,
+ &sharpness,
+ NULL);
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_sharpness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int decrease_sharpness (mm_camera_lib_handle *lib_handle) {
+ sharpness -= CAMERA_SHARPNESS_STEP;
+ if (sharpness < CAMERA_MIN_SHARPNESS) {
+ sharpness = CAMERA_MIN_SHARPNESS;
+ printf("Reached min SHARPNESS. \n");
+ }
+ printf("Decrease Sharpness to %d\n", sharpness);
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_SHARPNESS,
+ &sharpness,
+ NULL);
+}
+
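+/*===========================================================================
+ * FUNCTION - set_flash_mode -
+ *
+ * DESCRIPTION: Maps the selected menu entry to a cam_flash_mode_t value and
+ * applies it through MM_CAMERA_LIB_FLASH.
+ *==========================================================================*/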
+int set_flash_mode (mm_camera_lib_handle *lib_handle, int action_param) {
+ cam_flash_mode_t type = 0;
+ switch (action_param) {
+ case FLASH_MODE_OFF:
+ printf("\n FLASH_MODE_OFF\n");
+ type = CAM_FLASH_MODE_OFF;
+ break;
+ case FLASH_MODE_AUTO:
+ printf("\n FLASH_MODE_AUTO\n");
+ type = CAM_FLASH_MODE_AUTO;
+ break;
+ case FLASH_MODE_ON:
+ printf("\n FLASH_MODE_ON\n");
+ type = CAM_FLASH_MODE_ON;
+ break;
+ case FLASH_MODE_TORCH:
+ printf("\n FLASH_MODE_TORCH\n");
+ type = CAM_FLASH_MODE_TORCH;
+ break;
+ default:
+ break;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_FLASH,
+ &type,
+ NULL);
+}
+
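+/*===========================================================================
+ * FUNCTION - set_bestshot_mode -
+ *
+ * DESCRIPTION: Maps the selected menu entry to a cam_scene_mode_type value
+ * and applies it through MM_CAMERA_LIB_BESTSHOT.
+ *==========================================================================*/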
+int set_bestshot_mode(mm_camera_lib_handle *lib_handle, int action_param) {
+ cam_scene_mode_type type = 0;
+ switch (action_param) {
+ case BESTSHOT_AUTO:
+ printf("\n BEST SHOT AUTO\n");
+ type = CAM_SCENE_MODE_OFF;
+ break;
+ case BESTSHOT_ACTION:
+ printf("\n BEST SHOT ACTION\n");
+ type = CAM_SCENE_MODE_ACTION;
+ break;
+ case BESTSHOT_PORTRAIT:
+ printf("\n BEST SHOT PORTRAIT\n");
+ type = CAM_SCENE_MODE_PORTRAIT;
+ break;
+ case BESTSHOT_LANDSCAPE:
+ printf("\n BEST SHOT LANDSCAPE\n");
+ type = CAM_SCENE_MODE_LANDSCAPE;
+ break;
+ case BESTSHOT_NIGHT:
+ printf("\n BEST SHOT NIGHT\n");
+ type = CAM_SCENE_MODE_NIGHT;
+ break;
+ case BESTSHOT_NIGHT_PORTRAIT:
+ printf("\n BEST SHOT NIGHT PORTRAIT\n");
+ type = CAM_SCENE_MODE_NIGHT_PORTRAIT;
+ break;
+ case BESTSHOT_THEATRE:
+ printf("\n BEST SHOT THREATRE\n");
+ type = CAM_SCENE_MODE_THEATRE;
+ break;
+ case BESTSHOT_BEACH:
+ printf("\n BEST SHOT BEACH\n");
+ type = CAM_SCENE_MODE_BEACH;
+ break;
+ case BESTSHOT_SNOW:
+ printf("\n BEST SHOT SNOW\n");
+ type = CAM_SCENE_MODE_SNOW;
+ break;
+ case BESTSHOT_SUNSET:
+ printf("\n BEST SHOT SUNSET\n");
+ type = CAM_SCENE_MODE_SUNSET;
+ break;
+ case BESTSHOT_ANTISHAKE:
+ printf("\n BEST SHOT ANTISHAKE\n");
+ type = CAM_SCENE_MODE_ANTISHAKE;
+ break;
+ case BESTSHOT_FIREWORKS:
+ printf("\n BEST SHOT FIREWORKS\n");
+ type = CAM_SCENE_MODE_FIREWORKS;
+ break;
+ case BESTSHOT_SPORTS:
+ printf("\n BEST SHOT SPORTS\n");
+ type = CAM_SCENE_MODE_SPORTS;
+ break;
+ case BESTSHOT_PARTY:
+ printf("\n BEST SHOT PARTY\n");
+ type = CAM_SCENE_MODE_PARTY;
+ break;
+ case BESTSHOT_CANDLELIGHT:
+ printf("\n BEST SHOT CANDLELIGHT\n");
+ type = CAM_SCENE_MODE_CANDLELIGHT;
+ break;
+ case BESTSHOT_ASD:
+ printf("\n BEST SHOT ASD\n");
+ type = CAM_SCENE_MODE_AUTO;
+ break;
+ case BESTSHOT_BACKLIGHT:
+ printf("\n BEST SHOT BACKLIGHT\n");
+ type = CAM_SCENE_MODE_BACKLIGHT;
+ break;
+ case BESTSHOT_FLOWERS:
+ printf("\n BEST SHOT FLOWERS\n");
+ type = CAM_SCENE_MODE_FLOWERS;
+ break;
+ case BESTSHOT_AR:
+ printf("\n BEST SHOT AR\n");
+ type = CAM_SCENE_MODE_AR;
+ break;
+ case BESTSHOT_HDR:
+ printf("\n BEST SHOT HDR\n");
+ type = CAM_SCENE_MODE_OFF;
+ break;
+ default:
+ break;
+ }
+ return mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_BESTSHOT,
+ &type,
+ NULL);
+}
+/*===========================================================================
+ * FUNCTION - print_current_menu -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+int print_current_menu (menu_id_change_t current_menu_id) {
+ if (current_menu_id == MENU_ID_MAIN) {
+ print_menu_preview_video ();
+ } else if (current_menu_id == MENU_ID_WHITEBALANCECHANGE) {
+ camera_preview_video_wb_change_tbl();
+ } else if (current_menu_id == MENU_ID_EXPMETERINGCHANGE) {
+ camera_preview_video_exp_metering_change_tbl();
+ } else if (current_menu_id == MENU_ID_GET_CTRL_VALUE) {
+ camera_preview_video_get_ctrl_value_tbl();
+ } else if (current_menu_id == MENU_ID_ISOCHANGE) {
+ camera_preview_video_iso_change_tbl();
+ } else if (current_menu_id == MENU_ID_BRIGHTNESSCHANGE) {
+ camera_brightness_change_tbl ();
+ } else if (current_menu_id == MENU_ID_CONTRASTCHANGE) {
+ camera_contrast_change_tbl ();
+ } else if (current_menu_id == MENU_ID_EVCHANGE) {
+ camera_EV_change_tbl ();
+ } else if (current_menu_id == MENU_ID_SATURATIONCHANGE) {
+ camera_saturation_change_tbl ();
+ } else if (current_menu_id == MENU_ID_ZOOMCHANGE) {
+ camera_preview_video_zoom_change_tbl();
+ } else if (current_menu_id == MENU_ID_SHARPNESSCHANGE) {
+ camera_preview_video_sharpness_change_tbl();
+ } else if (current_menu_id == MENU_ID_BESTSHOT) {
+ camera_set_bestshot_tbl();
+ } else if (current_menu_id == MENU_ID_FLASHMODE) {
+ camera_set_flashmode_tbl();
+ } else if (current_menu_id == MENU_ID_SENSORS ) {
+ camera_sensors_tbl();
+ } else if (current_menu_id == MENU_ID_SWITCH_RES ) {
+ camera_resolution_change_tbl();
+ }
+
+ return 0;
+}
+
+int filter_resolutions(mm_camera_lib_handle *lib_handle,
+ DIMENSION_TBL_T *tbl,
+ size_t tbl_size)
+{
+ size_t i, j;
+ cam_capability_t camera_cap;
+ int rc = 0;
+
+ if ( ( NULL == lib_handle ) || ( NULL == tbl ) ) {
+ return -1;
+ }
+
+ rc = mm_camera_lib_get_caps(lib_handle, &camera_cap);
+ if ( MM_CAMERA_OK != rc ) {
+ LOGE("mm_camera_lib_get_caps() err=%d\n", rc);
+ return -1;
+ }
+
+ for( i = 0 ; i < tbl_size ; i++ ) {
+ for( j = 0; j < camera_cap.picture_sizes_tbl_cnt; j++ ) {
+ if ( ( tbl[i].width == camera_cap.picture_sizes_tbl[j].width ) &&
+ ( tbl[i].height == camera_cap.picture_sizes_tbl[j].height ) ) {
+ tbl[i].supported = 1;
+ rc = (int)i;
+ break;
+ }
+ }
+ }
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : enableAFR
+ *
+ * DESCRIPTION: This function will go through the list
+ * of supported FPS ranges and select the
+ * one which has maximum range
+ *
+ * PARAMETERS :
+ * @lib_handle : camera test library handle
+ *
+ * RETURN     : int32_t type of status
+ * MM_CAMERA_OK -- Success
+ * !=MM_CAMERA_OK -- Error status
+ *==========================================================================*/
+int enableAFR(mm_camera_lib_handle *lib_handle)
+{
+ size_t i, j;
+ float max_range = 0.0f;
+ cam_capability_t cap;
+ int rc = MM_CAMERA_OK;
+
+ if ( NULL == lib_handle ) {
+ return MM_CAMERA_E_INVALID_INPUT;
+ }
+
+ rc = mm_camera_lib_get_caps(lib_handle, &cap);
+ if ( MM_CAMERA_OK != rc ) {
+ LOGE("mm_camera_lib_get_caps() err=%d\n", rc);
+ return rc;
+ }
+
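+    /* Track the widest supported FPS range; j ends up indexing the range with the largest max-min span. */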
+    for( i = 0, j = 0 ; i < cap.fps_ranges_tbl_cnt ; i++ ) {
+        if ( max_range < (cap.fps_ranges_tbl[i].max_fps - cap.fps_ranges_tbl[i].min_fps) ) {
+            max_range = cap.fps_ranges_tbl[i].max_fps - cap.fps_ranges_tbl[i].min_fps;
+            j = i;
+        }
+    }
+
+ rc = mm_camera_lib_send_command(lib_handle,
+ MM_CAMERA_LIB_FPS_RANGE,
+ &cap.fps_ranges_tbl[j],
+ NULL);
+
+ LOGE("FPS range [%5.2f:%5.2f] rc = %d",
+ cap.fps_ranges_tbl[j].min_fps,
+ cap.fps_ranges_tbl[j].max_fps,
+ rc);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - submain -
+ *
+ * DESCRIPTION: Opens the camera test library, applies the default
+ *              configuration and runs the interactive menu loop until exit.
+ * ===========================================================================*/
+static int submain()
+{
+ int rc = 0;
+ char tc_buf[3];
+ menu_id_change_t current_menu_id = MENU_ID_MAIN, next_menu_id;
+ camera_action_t action_id;
+ int action_param;
+ uint8_t previewing = 0;
+ int isZSL = 0;
+ uint8_t wnr_enabled = 0;
+ mm_camera_lib_handle lib_handle;
+ int num_cameras;
+ int available_sensors =
+ (int)(sizeof(sensor_tbl) / sizeof(sensor_tbl[0]));
+ int available_snap_sizes =
+ (int)(sizeof(dimension_tbl)/sizeof(dimension_tbl[0]));
+ int i,c;
+ mm_camera_lib_snapshot_params snap_dim;
+ snap_dim.width = DEFAULT_SNAPSHOT_WIDTH;
+ snap_dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+ cam_scene_mode_type default_scene= CAM_SCENE_MODE_OFF;
+ int set_tintless= 0;
+
+ mm_camera_test_obj_t test_obj;
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+
+ rc = mm_camera_lib_open(&lib_handle, 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_open() err=%d\n", rc);
+ return -1;
+ }
+
+ num_cameras = mm_camera_lib_number_of_cameras(&lib_handle);
+ if ( 0 >= num_cameras ) {
+ LOGE(" No camera sensors reported!");
+ rc = -1;
+ goto ERROR;
+    } else if ( 1 < num_cameras ) {
+ c = MIN(num_cameras, available_sensors);
+ for ( i = 0 ; i < c ; i++ ) {
+ sensor_tbl[i].present = 1;
+ }
+ current_menu_id = MENU_ID_SENSORS;
+ } else {
+ i = filter_resolutions(&lib_handle,
+ dimension_tbl,
+ (size_t)available_snap_sizes);
+ if ( ( i < 0 ) || ( i >= available_snap_sizes ) ) {
+ LOGE("filter_resolutions()\n");
+ goto ERROR;
+ }
+ snap_dim.width = dimension_tbl[i].width;
+ snap_dim.height = dimension_tbl[i].height;
+
+ rc = enableAFR(&lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("enableAFR() err=%d\n", rc);
+ goto ERROR;
+ }
+
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_BESTSHOT,
+ &default_scene,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ }
+ /*start the eztune server*/
+ LOGH("Starting eztune Server \n");
+ eztune_server_start(&lib_handle);
+
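+    /* Interactive loop: print the active menu, read one selection from stdin, and dispatch the resulting action. */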
+ do {
+ print_current_menu (current_menu_id);
+ fgets(tc_buf, 3, stdin);
+
+ next_menu_id = next_menu(current_menu_id, tc_buf[0], & action_id, & action_param);
+
+ if (next_menu_id != MENU_ID_INVALID) {
+ current_menu_id = next_menu_id;
+ }
+ if (action_id == ACTION_NO_ACTION) {
+ continue;
+ }
+
+ switch(action_id) {
+ case ACTION_START_PREVIEW:
+ LOGE("ACTION_START_PREVIEW \n");
+ rc = mm_camera_lib_start_stream(&lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_start_stream() err=%d\n", rc);
+ goto ERROR;
+ }
+ previewing = 1;
+ break;
+
+ case ACTION_STOP_PREVIEW:
+ LOGD("ACTION_STOP_PREVIEW \n");
+ rc = mm_camera_lib_stop_stream(&lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_stop_stream() err=%d\n", rc);
+ goto ERROR;
+ }
+ previewing = 0;
+ break;
+
+ case ACTION_SET_WHITE_BALANCE:
+ LOGD("Selection for the White Balance changes\n");
+ set_whitebalance(&lib_handle, action_param);
+ break;
+
+ case ACTION_SET_TINTLESS_ENABLE:
+ LOGD("Selection for the Tintless enable changes\n");
+ set_tintless = 1;
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_SET_TINTLESS,
+ &set_tintless,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_SET_TINTLESS_DISABLE:
+ LOGD("Selection for the Tintless disable changes\n");
+ set_tintless = 0;
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_SET_TINTLESS,
+ &set_tintless,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_SET_EXP_METERING:
+ LOGD("Selection for the Exposure Metering changes\n");
+ set_exp_metering(&lib_handle, action_param);
+ break;
+
+ case ACTION_GET_CTRL_VALUE:
+ LOGD("Selection for getting control value\n");
+ get_ctrl_value(action_param);
+ break;
+
+ case ACTION_BRIGHTNESS_INCREASE:
+ printf("Increase brightness\n");
+ increase_brightness(&lib_handle);
+ break;
+
+ case ACTION_BRIGHTNESS_DECREASE:
+ printf("Decrease brightness\n");
+ decrease_brightness(&lib_handle);
+ break;
+
+ case ACTION_CONTRAST_INCREASE:
+ LOGD("Selection for the contrast increase\n");
+ increase_contrast (&lib_handle);
+ break;
+
+ case ACTION_CONTRAST_DECREASE:
+ LOGD("Selection for the contrast decrease\n");
+ decrease_contrast (&lib_handle);
+ break;
+
+ case ACTION_EV_INCREASE:
+ LOGD("Selection for the EV increase\n");
+ increase_EV ();
+ break;
+
+ case ACTION_EV_DECREASE:
+ LOGD("Selection for the EV decrease\n");
+ decrease_EV ();
+ break;
+
+ case ACTION_SATURATION_INCREASE:
+            LOGD("Selection for the saturation increase\n");
+ increase_saturation (&lib_handle);
+ break;
+
+ case ACTION_SATURATION_DECREASE:
+            LOGD("Selection for the saturation decrease\n");
+ decrease_saturation (&lib_handle);
+ break;
+
+ case ACTION_TOGGLE_AFR:
+            LOGD("Selection for auto frame rate toggle\n");
+ toggle_afr();
+ break;
+
+ case ACTION_SET_ISO:
+            LOGD("Selection for ISO changes\n");
+ set_iso(&lib_handle, action_param);
+ break;
+
+ case ACTION_SET_ZOOM:
+ LOGD("Selection for the zoom direction changes\n");
+ set_zoom(&lib_handle, action_param);
+ break;
+
+ case ACTION_SHARPNESS_INCREASE:
+ LOGD("Selection for sharpness increase\n");
+ increase_sharpness(&lib_handle);
+ break;
+
+ case ACTION_SHARPNESS_DECREASE:
+ LOGD("Selection for sharpness decrease\n");
+ decrease_sharpness(&lib_handle);
+ break;
+
+ case ACTION_SET_BESTSHOT_MODE:
+ LOGD("Selection for bestshot\n");
+ set_bestshot_mode(&lib_handle, action_param);
+ break;
+
+ case ACTION_SET_FLASH_MODE:
+ printf("\n Selection for flashmode\n");
+ set_flash_mode(&lib_handle, action_param);
+ break;
+
+ case ACTION_SWITCH_CAMERA:
+ rc = mm_camera_lib_close(&lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_close() err=%d\n", rc);
+ goto ERROR;
+ }
+
+ rc = mm_camera_lib_open(&lib_handle, action_param);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_open() err=%d\n", rc);
+ goto ERROR;
+ }
+
+ i = filter_resolutions(&lib_handle,
+ dimension_tbl,
+ sizeof(dimension_tbl)/sizeof(dimension_tbl[0]));
+ if ( ( i < 0 ) || ( i >= available_snap_sizes ) ) {
+ LOGE("filter_resolutions()\n");
+ goto ERROR;
+ }
+ snap_dim.width = dimension_tbl[i].width;
+ snap_dim.height = dimension_tbl[i].height;
+
+ rc = enableAFR(&lib_handle);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("enableAFR() err=%d\n", rc);
+ goto ERROR;
+ }
+
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_BESTSHOT,
+ &default_scene,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_TOGGLE_ZSL:
+ printf("ZSL Toggle !!!\n");
+ isZSL = !isZSL;
+ if ( isZSL ) {
+ printf("ZSL on !!!\n");
+ } else {
+ printf("ZSL off !!!\n");
+ }
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_ZSL_ENABLE,
+ &isZSL,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_TAKE_RAW_SNAPSHOT:
+ LOGH("\n Take RAW snapshot\n");
+
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_DO_AF,
+ NULL,
+ NULL);
+
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_RAW_CAPTURE,
+ NULL,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_TAKE_JPEG_SNAPSHOT:
+ LOGH("\n Take JPEG snapshot\n");
+
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_JPEG_CAPTURE,
+ &snap_dim,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+ case ACTION_SWITCH_RESOLUTION:
+ printf("\n Switch snapshot resolution to %dx%d\n",
+ dimension_tbl[action_param].width,
+ dimension_tbl[action_param].height);
+ snap_dim.width = dimension_tbl[action_param].width;
+ snap_dim.height = dimension_tbl[action_param].height;
+ break;
+
+ case ACTION_START_RECORDING:
+ LOGD("Start recording action\n");
+#if 0
+ if (mm_app_start_video(cam_id) < 0)
+ goto ERROR;
+ is_rec = 1;
+#endif
+ break;
+ case ACTION_STOP_RECORDING:
+ LOGD("Stop recording action\n");
+#if 0
+ if(is_rec) {
+ if (mm_app_stop_video(cam_id) < 0)
+ goto ERROR;
+ is_rec = 0;
+ }
+#endif
+ break;
+ case ACTION_TAKE_LIVE_SNAPSHOT:
+ printf("Selection for live shot\n");
+#if 0
+ if(is_rec)
+ mm_app_take_live_snapshot(cam_id);
+ else
+ printf("\n !!! Use live snapshot option while recording only !!!\n");
+#endif
+ break;
+
+ case ACTION_TOGGLE_WNR:
+ wnr_enabled = !wnr_enabled;
+ printf("WNR Enabled = %d\n", wnr_enabled);
+ rc = mm_camera_lib_send_command(&lib_handle,
+ MM_CAMERA_LIB_WNR_ENABLE,
+ &wnr_enabled,
+ NULL);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_camera_lib_send_command() err=%d\n", rc);
+ goto ERROR;
+ }
+ break;
+
+ case ACTION_EXIT:
+ printf("Exiting....\n");
+ break;
+ case ACTION_NO_ACTION:
+            printf("Go back to main menu\n");
+ break;
+
+ default:
+ printf("\n\n!!!!!WRONG INPUT: %d!!!!\n", action_id);
+ break;
+ }
+
+ usleep(1000 * 1000);
+ LOGD("action_id = %d\n", action_id);
+
+ } while (action_id != ACTION_EXIT);
+ action_id = ACTION_NO_ACTION;
+
+ mm_camera_lib_close(&lib_handle);
+ return 0;
+
+ERROR:
+
+ mm_camera_lib_close(&lib_handle);
+
+ return rc;
+}
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
new file mode 100644
index 0000000..b20c95d
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
@@ -0,0 +1,1313 @@
+/*
+Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+#include <semaphore.h>
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static void mm_app_metadata_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ uint32_t i = 0;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_buf_def_t *frame;
+ metadata_buffer_t *pMetadata;
+
+ if (NULL == bufs || NULL == user_data) {
+ LOGE("bufs or user_data are not valid ");
+ return;
+ }
+ frame = bufs->bufs[0];
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+
+ if (NULL == channel) {
+ LOGE("Channel object is NULL ");
+ return;
+ }
+
+    /* find metadata stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+
+ if (NULL == p_stream) {
+ LOGE("cannot find metadata stream");
+ return;
+ }
+
+    /* find metadata frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ frame = bufs->bufs[i];
+ break;
+ }
+ }
+
+    if (pme->metadata == NULL) {
+        /* The app frees this metadata copy later; no need to free it here */
+        pme->metadata = malloc(sizeof(metadata_buffer_t));
+        if (NULL == pme->metadata) {
+            LOGE("Cannot allocate metadata memory\n");
+            return;
+        }
+    }
+ memcpy(pme->metadata, frame->buffer, sizeof(metadata_buffer_t));
+
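+    /* Inspect the AF state reported in the metadata and unblock the app once focus has converged or failed. */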
+ pMetadata = (metadata_buffer_t *)frame->buffer;
+    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetadata) {
+        if ((cam_af_state_t)(*afState) == CAM_AF_STATE_FOCUSED_LOCKED) {
+            LOGE("AutoFocus Done Call Back Received\n");
+            mm_camera_app_done();
+        } else if ((cam_af_state_t)(*afState) == CAM_AF_STATE_NOT_FOCUSED_LOCKED) {
+            LOGE("AutoFocus failed\n");
+            mm_camera_app_done();
+        }
+    }
+
+ if (pme->user_metadata_cb) {
+        LOGD("[DBG] user defined own metadata cb. calling it...");
+ pme->user_metadata_cb(frame);
+ }
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE("Failed in Preview Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+}
+
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+
+ int rc = 0;
+ uint32_t i = 0;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_stream_t *m_stream = NULL;
+ mm_camera_buf_def_t *p_frame = NULL;
+ mm_camera_buf_def_t *m_frame = NULL;
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE("Wrong channel id (%d)", bufs->ch_id);
+ rc = -1;
+ goto error;
+ }
+
+ /* find snapshot stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+ m_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == m_stream) {
+ LOGE("cannot find snapshot stream");
+ rc = -1;
+ goto error;
+ }
+
+ /* find snapshot frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+ m_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (NULL == m_frame) {
+ LOGE("main frame is NULL");
+ rc = -1;
+ goto error;
+ }
+
+ mm_app_dump_frame(m_frame, "main", "yuv", m_frame->frame_idx);
+
+ /* find postview stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_POSTVIEW) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL != p_stream) {
+ /* find preview frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ p_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (NULL != p_frame) {
+ mm_app_dump_frame(p_frame, "postview", "yuv", p_frame->frame_idx);
+ }
+ }
+
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+ ION_IOC_CLEAN_INV_CACHES);
+
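+    /* Allocate a JPEG output buffer sized to the main frame, then create an encoding session and start the encode job. */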
+ pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+ if ( NULL == pme->jpeg_buf.buf.buffer ) {
+ LOGE("error allocating jpeg output buffer");
+ goto error;
+ }
+
+ pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+ /* create a new jpeg encoding session */
+ rc = createEncodingSession(pme, m_stream, m_frame);
+ if (0 != rc) {
+ LOGE("error creating jpeg session");
+ free(pme->jpeg_buf.buf.buffer);
+ goto error;
+ }
+
+ /* start jpeg encoding job */
+ rc = encodeData(pme, bufs, m_stream);
+ if (0 != rc) {
+        LOGE("error starting jpeg encode job");
+ free(pme->jpeg_buf.buf.buffer);
+ goto error;
+ }
+
+error:
+ /* buf done rcvd frames in error case */
+ if ( 0 != rc ) {
+ for (i=0; i<bufs->num_bufs; i++) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ bufs->bufs[i])) {
+ LOGE("Failed in Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)bufs->bufs[i]->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+ }
+
+ LOGD(" END\n");
+}
+
+static void mm_app_preview_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ uint32_t i = 0;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_buf_def_t *frame = NULL;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+ if (NULL == bufs || NULL == user_data) {
+ LOGE("bufs or user_data are not valid ");
+ return;
+ }
+
+ frame = bufs->bufs[0];
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE("Channel object is NULL ");
+ return;
+ }
+ /* find preview stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+
+ if (NULL == p_stream) {
+ LOGE("cannot find preview stream");
+ return;
+ }
+
+ /* find preview frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ frame = bufs->bufs[i];
+ break;
+ }
+ }
+
+ if ( 0 < pme->fb_fd ) {
+ mm_app_overlay_display(pme, frame->fd);
+ }
+#ifdef DUMP_PRV_IN_FILE
+ {
+ char file_name[64];
+ snprintf(file_name, sizeof(file_name), "P_C%d", pme->cam->camera_handle);
+ mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+ }
+#endif
+ if (pme->user_preview_cb) {
+        LOGD("[DBG] user defined own preview cb. calling it...");
+ pme->user_preview_cb(frame);
+ }
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE("Failed in Preview Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+
+ LOGD(" END\n");
+}
+
+static void mm_app_zsl_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ int rc = MM_CAMERA_OK;
+ uint32_t i = 0;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_stream_t *m_stream = NULL;
+ mm_camera_stream_t *md_stream = NULL;
+ mm_camera_buf_def_t *p_frame = NULL;
+ mm_camera_buf_def_t *m_frame = NULL;
+ mm_camera_buf_def_t *md_frame = NULL;
+
+ LOGD(" BEGIN\n");
+
+ if (NULL == bufs || NULL == user_data) {
+ LOGE("bufs or user_data are not valid ");
+ return;
+ }
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE("Wrong channel id (%d)", bufs->ch_id);
+ return;
+ }
+
+ /* find preview stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == p_stream) {
+ LOGE("cannot find preview stream");
+ return;
+ }
+
+ /* find snapshot stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+ m_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == m_stream) {
+ LOGE("cannot find snapshot stream");
+ return;
+ }
+
+ /* find metadata stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+ md_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == md_stream) {
+ LOGE("cannot find metadata stream");
+ }
+
+ /* find preview frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ p_frame = bufs->bufs[i];
+ break;
+ }
+ }
+
+ if(md_stream) {
+ /* find metadata frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == md_stream->s_id) {
+ md_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (!md_frame) {
+ LOGE("md_frame is null\n");
+ return;
+ }
+ if (!pme->metadata) {
+ /* App will free the metadata */
+ pme->metadata = malloc(sizeof(metadata_buffer_t));
+ if (!pme->metadata) {
+                LOGE("not enough memory\n");
+ return;
+ }
+ }
+
+        memcpy(pme->metadata, md_frame->buffer, sizeof(metadata_buffer_t));
+ }
+ /* find snapshot frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+ m_frame = bufs->bufs[i];
+ break;
+ }
+ }
+
+ if (!m_frame || !p_frame) {
+ LOGE("cannot find preview/snapshot frame");
+ return;
+ }
+
+ LOGD(" ZSL CB with fb_fd = %d, m_frame = %p, p_frame = %p \n",
+ pme->fb_fd,
+ m_frame,
+ p_frame);
+
+ if ( 0 < pme->fb_fd ) {
+ mm_app_overlay_display(pme, p_frame->fd);
+ }/* else {
+ mm_app_dump_frame(p_frame, "zsl_preview", "yuv", p_frame->frame_idx);
+ mm_app_dump_frame(m_frame, "zsl_main", "yuv", m_frame->frame_idx);
+ }*/
+
+ if ( pme->enable_reproc && ( NULL != pme->reproc_stream ) ) {
+
+ if (NULL != md_frame) {
+ rc = mm_app_do_reprocess(pme,
+ m_frame,
+ md_frame->buf_idx,
+ bufs,
+ md_stream);
+
+ if (MM_CAMERA_OK != rc ) {
+ LOGE("reprocess failed rc = %d", rc);
+ }
+ } else {
+ LOGE("md_frame is null\n");
+ }
+
+ return;
+ }
+
+ if ( pme->encodeJpeg ) {
+ pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+ if ( NULL == pme->jpeg_buf.buf.buffer ) {
+ LOGE("error allocating jpeg output buffer");
+ goto exit;
+ }
+
+ pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+ /* create a new jpeg encoding session */
+ rc = createEncodingSession(pme, m_stream, m_frame);
+ if (0 != rc) {
+ LOGE("error creating jpeg session");
+ free(pme->jpeg_buf.buf.buffer);
+ goto exit;
+ }
+
+ /* start jpeg encoding job */
+ rc = encodeData(pme, bufs, m_stream);
+ pme->encodeJpeg = 0;
+ } else {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ m_frame)) {
+ LOGE("Failed in main Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+
+exit:
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ p_frame)) {
+ LOGE("Failed in preview Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)p_frame->mem_info,
+ ION_IOC_INV_CACHES);
+
+ if(md_frame) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ md_frame)) {
+ LOGE("Failed in metadata Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)md_frame->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+
+ LOGD(" END\n");
+}
+
+mm_camera_stream_t * mm_app_add_metadata_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
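+    /* The metadata stream is configured as a flat buffer: width carries the buffer size in bytes and height is 1. */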
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_METADATA;
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+ stream->s_config.stream_info->dim.width = sizeof(metadata_buffer_t);
+ stream->s_config.stream_info->dim.height = 1;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+cam_dimension_t mm_app_get_analysis_stream_dim(
+ const mm_camera_test_obj_t *test_obj,
+ const cam_dimension_t* preview_dim)
+{
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ cam_dimension_t max_analysis_dim =
+ cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_max_res;
+ cam_dimension_t analysis_dim = {0, 0};
+
+ if (preview_dim->width > max_analysis_dim.width ||
+ preview_dim->height > max_analysis_dim.height) {
+ double max_ratio, requested_ratio;
+
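+        /* Clamp to the maximum analysis resolution while preserving the preview aspect ratio. */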
+ max_ratio = (double)max_analysis_dim.width / (double)max_analysis_dim.height;
+ requested_ratio = (double)preview_dim->width / (double)preview_dim->height;
+
+        if (max_ratio < requested_ratio) {
+            analysis_dim.width = max_analysis_dim.width;
+            analysis_dim.height = (int32_t)((double)analysis_dim.width / requested_ratio);
+        } else {
+            analysis_dim.height = max_analysis_dim.height;
+            analysis_dim.width = (int32_t)((double)analysis_dim.height * requested_ratio);
+        }
+ analysis_dim.width &= ~0x1;
+ analysis_dim.height &= ~0x1;
+ } else {
+ analysis_dim = *preview_dim;
+ }
+
+ LOGI("analysis stream dim (%d x %d)\n", analysis_dim.width, analysis_dim.height);
+ return analysis_dim;
+}
+
+mm_camera_stream_t * mm_app_add_analysis_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ cam_dimension_t preview_dim = {0, 0};
+ cam_dimension_t analysis_dim = {0, 0};
+
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+
+ if ((test_obj->preview_resolution.user_input_display_width == 0) ||
+ ( test_obj->preview_resolution.user_input_display_height == 0)) {
+ preview_dim.width = DEFAULT_PREVIEW_WIDTH;
+ preview_dim.height = DEFAULT_PREVIEW_HEIGHT;
+ } else {
+ preview_dim.width = test_obj->preview_resolution.user_input_display_width;
+ preview_dim.height = test_obj->preview_resolution.user_input_display_height;
+ }
+
+ analysis_dim = mm_app_get_analysis_stream_dim(test_obj, &preview_dim);
+    LOGI("analysis stream dimension: %d x %d\n",
+ analysis_dim.width, analysis_dim.height);
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_ANALYSIS;
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+ stream->s_config.stream_info->dim = analysis_dim;
+ stream->s_config.padding_info =
+ cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_stream_t * mm_app_add_preview_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ cam_dimension_t preview_dim = {0, 0};
+ cam_dimension_t analysis_dim = {0, 0};
+
+ if ((test_obj->preview_resolution.user_input_display_width == 0) ||
+ ( test_obj->preview_resolution.user_input_display_height == 0)) {
+ preview_dim.width = DEFAULT_PREVIEW_WIDTH;
+ preview_dim.height = DEFAULT_PREVIEW_HEIGHT;
+ } else {
+ preview_dim.width = test_obj->preview_resolution.user_input_display_width;
+ preview_dim.height = test_obj->preview_resolution.user_input_display_height;
+ }
+    LOGI("preview dimension: %d x %d\n", preview_dim.width, preview_dim.height);
+
+ analysis_dim = mm_app_get_analysis_stream_dim(test_obj, &preview_dim);
+    LOGI("analysis stream dimension: %d x %d\n",
+ analysis_dim.width, analysis_dim.height);
+
+ uint32_t analysis_pp_mask = cam_cap->qcom_supported_feature_mask &
+ (CAM_QCOM_FEATURE_SHARPNESS |
+ CAM_QCOM_FEATURE_EFFECT |
+ CAM_QCOM_FEATURE_DENOISE2D);
+ LOGI("analysis stream pp mask:%x\n", analysis_pp_mask);
+
+ cam_stream_size_info_t abc ;
+ memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
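+    /* Advertise the preview + analysis stream set to the backend; postprocess_mask[0] is the hard-coded feature mask used by this test app. */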
+ abc.num_streams = 2;
+ abc.postprocess_mask[0] = 2178;
+ abc.stream_sizes[0].width = preview_dim.width;
+ abc.stream_sizes[0].height = preview_dim.height;
+ abc.type[0] = CAM_STREAM_TYPE_PREVIEW;
+
+ abc.postprocess_mask[1] = analysis_pp_mask;
+ abc.stream_sizes[1].width = analysis_dim.width;
+ abc.stream_sizes[1].height = analysis_dim.height;
+ abc.type[1] = CAM_STREAM_TYPE_ANALYSIS;
+
+ abc.buffer_info.min_buffers = 10;
+ abc.buffer_info.max_buffers = 10;
+ abc.is_type[0] = IS_TYPE_NONE;
+
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("meta info command failed\n");
+ }
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_PREVIEW;
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+
+ stream->s_config.stream_info->dim.width = preview_dim.width;
+ stream->s_config.stream_info->dim.height = preview_dim.height;
+
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_stream_t * mm_app_add_raw_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ cam_stream_size_info_t abc ;
+ memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+ abc.num_streams = 1;
+ abc.postprocess_mask[0] = 0;
+
+ if ( test_obj->buffer_width == 0 || test_obj->buffer_height == 0 ) {
+ abc.stream_sizes[0].width = DEFAULT_SNAPSHOT_WIDTH;
+ abc.stream_sizes[0].height = DEFAULT_SNAPSHOT_HEIGHT;
+ } else {
+ abc.stream_sizes[0].width = (int32_t)test_obj->buffer_width;
+ abc.stream_sizes[0].height = (int32_t)test_obj->buffer_height;
+ }
+ abc.type[0] = CAM_STREAM_TYPE_RAW;
+
+ abc.buffer_info.min_buffers = num_bufs;
+ abc.buffer_info.max_buffers = num_bufs;
+ abc.is_type[0] = IS_TYPE_NONE;
+
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("meta info command failed\n");
+ }
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_RAW;
+ if (num_burst == 0) {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+ stream->s_config.stream_info->num_of_burst = num_burst;
+ }
+ stream->s_config.stream_info->fmt = test_obj->buffer_format;
+ if ( test_obj->buffer_width == 0 || test_obj->buffer_height == 0 ) {
+ stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+ stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+ } else {
+ stream->s_config.stream_info->dim.width = (int32_t)test_obj->buffer_width;
+ stream->s_config.stream_info->dim.height = (int32_t)test_obj->buffer_height;
+ }
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_stream_t * mm_app_add_snapshot_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ cam_stream_size_info_t abc_snap ;
+ memset (&abc_snap , 0, sizeof (cam_stream_size_info_t));
+
+ abc_snap.num_streams = 2;
+ abc_snap.postprocess_mask[1] = 2178;
+ abc_snap.stream_sizes[1].width = DEFAULT_PREVIEW_WIDTH;
+ abc_snap.stream_sizes[1].height = DEFAULT_PREVIEW_HEIGHT;
+ abc_snap.type[1] = CAM_STREAM_TYPE_POSTVIEW;
+
+ abc_snap.postprocess_mask[0] = 0;
+ abc_snap.stream_sizes[0].width = DEFAULT_SNAPSHOT_WIDTH;
+ abc_snap.stream_sizes[0].height = DEFAULT_SNAPSHOT_HEIGHT;
+ abc_snap.type[0] = CAM_STREAM_TYPE_SNAPSHOT;
+
+ abc_snap.buffer_info.min_buffers = 7;
+ abc_snap.buffer_info.max_buffers = 7;
+ abc_snap.is_type[0] = IS_TYPE_NONE;
+
+ rc = setmetainfoCommand(test_obj, &abc_snap);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("meta info command snapshot failed\n");
+ }
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
+ if (num_burst == 0) {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+ stream->s_config.stream_info->num_of_burst = num_burst;
+ }
+ stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
+    if ( test_obj->buffer_width == 0 || test_obj->buffer_height == 0 ) {
+        stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+        stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    } else {
+        stream->s_config.stream_info->dim.width = (int32_t)test_obj->buffer_width;
+        stream->s_config.stream_info->dim.height = (int32_t)test_obj->buffer_height;
+    }
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_channel_t * mm_app_add_preview_channel(mm_camera_test_obj_t *test_obj)
+{
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_PREVIEW,
+ NULL,
+ NULL,
+ NULL);
+ if (NULL == channel) {
+ LOGE("add channel failed");
+ return NULL;
+ }
+
+ stream = mm_app_add_preview_stream(test_obj,
+ channel,
+ mm_app_preview_notify_cb,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return NULL;
+ }
+
+ return channel;
+}
+
+int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ uint8_t i;
+ cam_stream_size_info_t abc ;
+ memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+ rc = mm_app_stop_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
+ for (i = 0; i < channel->num_streams; i++) {
+ stream = &channel->streams[i];
+ rc = mm_app_del_stream(test_obj, channel, stream);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("del stream(%d) failed rc=%d\n", i, rc);
+ }
+ }
+ } else {
+ LOGE("num_streams = %d. Should not be more than %d\n",
+ channel->num_streams, MAX_STREAM_NUM_IN_BUNDLE);
+ }
+
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("meta info command failed\n");
+ }
+
+ rc = mm_app_del_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("delete channel failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_start_preview(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+ mm_camera_stream_t *s_metadata = NULL;
+ mm_camera_stream_t *s_analysis = NULL;
+ uint8_t i;
+
+ channel = mm_app_add_preview_channel(test_obj);
+ if (NULL == channel) {
+ LOGE("add channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ s_metadata = mm_app_add_metadata_stream(test_obj,
+ channel,
+ mm_app_metadata_notify_cb,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM);
+ if (NULL == s_metadata) {
+ LOGE("add metadata stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ s_analysis = mm_app_add_analysis_stream(test_obj,
+ channel,
+ NULL,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM);
+ if (NULL == s_analysis) {
+        LOGE("add analysis stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ rc = mm_app_start_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start preview failed rc=%d\n", rc);
+ if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
+ for (i = 0; i < channel->num_streams; i++) {
+ stream = &channel->streams[i];
+ mm_app_del_stream(test_obj, channel, stream);
+ }
+ }
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ return rc;
+}
+
+int mm_app_stop_preview(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel =
+ mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+
+ rc = mm_app_stop_and_del_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+ int32_t rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *s_preview = NULL;
+ mm_camera_stream_t *s_metadata = NULL;
+ mm_camera_stream_t *s_main = NULL;
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
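+    /* ZSL bundle attributes: continuous notify, 2-frame look-back, no frame skip, watermark of 2 and up to 3 unmatched frames. */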
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ attr.look_back = 2;
+ attr.post_frame_skip = 0;
+ attr.water_mark = 2;
+ attr.max_unmatched_frames = 3;
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_ZSL,
+ &attr,
+ mm_app_zsl_notify_cb,
+ test_obj);
+ if (NULL == channel) {
+ LOGE("add channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ s_preview = mm_app_add_preview_stream(test_obj,
+ channel,
+ mm_app_preview_notify_cb,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM);
+ if (NULL == s_preview) {
+ LOGE("add preview stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ s_main = mm_app_add_snapshot_stream(test_obj,
+ channel,
+ mm_app_snapshot_notify_cb,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM,
+ 0);
+ if (NULL == s_main) {
+ LOGE("add main snapshot stream failed\n");
+ mm_app_del_stream(test_obj, channel, s_preview);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ s_metadata = mm_app_add_metadata_stream(test_obj,
+ channel,
+ mm_app_metadata_notify_cb,
+ (void *)test_obj,
+ PREVIEW_BUF_NUM);
+ if (NULL == s_metadata) {
+ LOGE("add metadata stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ rc = mm_app_start_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start zsl failed rc=%d\n", rc);
+ mm_app_del_stream(test_obj, channel, s_preview);
+ mm_app_del_stream(test_obj, channel, s_metadata);
+ mm_app_del_stream(test_obj, channel, s_main);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ if ( test_obj->enable_reproc ) {
+ if ( NULL == mm_app_add_reprocess_channel(test_obj, s_main) ) {
+ LOGE("Reprocess channel failed to initialize \n");
+ mm_app_del_stream(test_obj, channel, s_preview);
+#ifdef USE_METADATA_STREAM
+ mm_app_del_stream(test_obj, channel, s_metadata);
+#endif
+ mm_app_del_stream(test_obj, channel, s_main);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+ rc = mm_app_start_reprocess(test_obj);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("reprocess start failed rc=%d\n", rc);
+ mm_app_del_stream(test_obj, channel, s_preview);
+#ifdef USE_METADATA_STREAM
+ mm_app_del_stream(test_obj, channel, s_metadata);
+#endif
+ mm_app_del_stream(test_obj, channel, s_main);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+ }
+
+ return rc;
+}
+
+int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_channel_t *channel =
+ mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_ZSL);
+
+ rc = mm_app_stop_and_del_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ if ( test_obj->enable_reproc ) {
+ rc |= mm_app_stop_reprocess(test_obj);
+ }
+
+ return rc;
+}
+
+int mm_app_initialize_fb(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ int brightness_fd;
+ const char brightness_level[] = BACKLIGHT_LEVEL;
+ void *fb_base = NULL;
+
+ assert( ( NULL != test_obj ) && ( 0 == test_obj->fb_fd ) );
+
+ test_obj->fb_fd = open(FB_PATH, O_RDWR);
+ if ( 0 > test_obj->fb_fd ) {
+ LOGE("FB device open failed rc=%d, %s\n",
+ -errno,
+ strerror(errno));
+ rc = -errno;
+ goto FAIL;
+ }
+
+ rc = ioctl(test_obj->fb_fd, FBIOGET_VSCREENINFO, &test_obj->vinfo);
+ if ( MM_CAMERA_OK != rc ) {
+ LOGE("Can not retrieve screen info rc=%d, %s\n",
+ -errno,
+ strerror(errno));
+ rc = -errno;
+ goto FAIL;
+ }
+
+ if ( ( 0 == test_obj->vinfo.yres_virtual ) ||
+ ( 0 == test_obj->vinfo.yres ) ||
+ ( test_obj->vinfo.yres > test_obj->vinfo.yres_virtual ) ) {
+ LOGE("Invalid FB virtual yres: %d, yres: %d\n",
+ test_obj->vinfo.yres_virtual,
+ test_obj->vinfo.yres);
+ rc = MM_CAMERA_E_GENERAL;
+ goto FAIL;
+ }
+
+ if ( ( 0 == test_obj->vinfo.xres_virtual ) ||
+ ( 0 == test_obj->vinfo.xres ) ||
+ ( test_obj->vinfo.xres > test_obj->vinfo.xres_virtual ) ) {
+ LOGE("Invalid FB virtual xres: %d, xres: %d\n",
+ test_obj->vinfo.xres_virtual,
+ test_obj->vinfo.xres);
+ rc = MM_CAMERA_E_GENERAL;
+ goto FAIL;
+ }
+
+ brightness_fd = open(BACKLIGHT_CONTROL, O_RDWR);
+ if ( brightness_fd >= 0 ) {
+ write(brightness_fd, brightness_level, strlen(brightness_level));
+ close(brightness_fd);
+ }
+
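+    /* Configure an MDP overlay that matches the camera buffer dimensions; preview frames are later played onto it and panned to the display. */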
+ test_obj->slice_size = test_obj->vinfo.xres * ( test_obj->vinfo.yres - 1 ) * DEFAULT_OV_FORMAT_BPP;
+ memset(&test_obj->data_overlay, 0, sizeof(struct mdp_overlay));
+ test_obj->data_overlay.src.width = test_obj->buffer_width;
+ test_obj->data_overlay.src.height = test_obj->buffer_height;
+ test_obj->data_overlay.src_rect.w = test_obj->buffer_width;
+ test_obj->data_overlay.src_rect.h = test_obj->buffer_height;
+ test_obj->data_overlay.dst_rect.w = test_obj->buffer_width;
+ test_obj->data_overlay.dst_rect.h = test_obj->buffer_height;
+ test_obj->data_overlay.src.format = DEFAULT_OV_FORMAT;
+ test_obj->data_overlay.src_rect.x = 0;
+ test_obj->data_overlay.src_rect.y = 0;
+ test_obj->data_overlay.dst_rect.x = 0;
+ test_obj->data_overlay.dst_rect.y = 0;
+ test_obj->data_overlay.z_order = 2;
+ test_obj->data_overlay.alpha = 0x80;
+ test_obj->data_overlay.transp_mask = 0xffe0;
+ test_obj->data_overlay.flags = MDP_FLIP_LR | MDP_FLIP_UD;
+
+ // Map and clear FB portion
+ fb_base = mmap(0,
+ test_obj->slice_size,
+ PROT_WRITE,
+ MAP_SHARED,
+ test_obj->fb_fd,
+ 0);
+ if ( MAP_FAILED == fb_base ) {
+        LOGE("Error while memory mapping frame buffer %s",
+ strerror(errno));
+ rc = -errno;
+ goto FAIL;
+ }
+
+ memset(fb_base, 0, test_obj->slice_size);
+
+ if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+ LOGE("FBIOPAN_DISPLAY failed!");
+ rc = -errno;
+ goto FAIL;
+ }
+
+ munmap(fb_base, test_obj->slice_size);
+ test_obj->data_overlay.id = (uint32_t)MSMFB_NEW_REQUEST;
+ rc = ioctl(test_obj->fb_fd, MSMFB_OVERLAY_SET, &test_obj->data_overlay);
+ if (rc < 0) {
+        LOGE("MSMFB_OVERLAY_SET failed! err=%d\n",
+            rc);
+ return MM_CAMERA_E_GENERAL;
+ }
+ LOGE("Overlay set with overlay id: %d", test_obj->data_overlay.id);
+
+ return rc;
+
+FAIL:
+
+ if ( 0 < test_obj->fb_fd ) {
+ close(test_obj->fb_fd);
+ }
+
+ return rc;
+}
+
+int mm_app_close_fb(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+
+ assert( ( NULL != test_obj ) && ( 0 < test_obj->fb_fd ) );
+
+ if (ioctl(test_obj->fb_fd, MSMFB_OVERLAY_UNSET, &test_obj->data_overlay.id)) {
+        LOGE("\nERROR! MSMFB_OVERLAY_UNSET failed! (Line %d)\n", __LINE__);
+ }
+
+ if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+        LOGE("ERROR: FBIOPAN_DISPLAY failed! line=%d\n", __LINE__);
+ }
+
+ close(test_obj->fb_fd);
+ test_obj->fb_fd = -1;
+
+ return rc;
+}
+
+void memset16(void *pDst, uint16_t value, int count)
+{
+ uint16_t *ptr = pDst;
+ while (count--)
+ *ptr++ = value;
+}
+
+int mm_app_overlay_display(mm_camera_test_obj_t *test_obj, int bufferFd)
+{
+ int rc = MM_CAMERA_OK;
+ struct msmfb_overlay_data ovdata;
+
+
+ memset(&ovdata, 0, sizeof(struct msmfb_overlay_data));
+ ovdata.id = test_obj->data_overlay.id;
+ ovdata.data.memory_id = bufferFd;
+
+ if (ioctl(test_obj->fb_fd, MSMFB_OVERLAY_PLAY, &ovdata)) {
+ LOGE("MSMFB_OVERLAY_PLAY failed!");
+ return MM_CAMERA_E_GENERAL;
+ }
+
+ if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+ LOGE("FBIOPAN_DISPLAY failed!");
+ return MM_CAMERA_E_GENERAL;
+ }
+
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c
new file mode 100644
index 0000000..61176be
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c
@@ -0,0 +1,168 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of The Linux Foundation nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+int mm_camera_queue_init(mm_camera_queue_t *queue,
+ release_data_fn data_rel_fn,
+ void *user_data)
+{
+ if ( NULL == queue ) {
+ return -1;
+ }
+
+ pthread_mutex_init(&queue->m_lock, NULL);
+ cam_list_init(&queue->m_head.list);
+ queue->m_size = 0;
+ queue->m_dataFn = data_rel_fn;
+ queue->m_userData = user_data;
+
+ return MM_CAMERA_OK;
+}
+
+int mm_qcamera_queue_release(mm_camera_queue_t *queue)
+{
+ if ( NULL == queue ) {
+ return -1;
+ }
+
+ mm_qcamera_queue_flush(queue);
+ pthread_mutex_destroy(&queue->m_lock);
+
+ return MM_CAMERA_OK;
+}
+
+int mm_qcamera_queue_isempty(mm_camera_queue_t *queue)
+{
+ if ( NULL == queue ) {
+ return 0;
+ }
+
+ int flag = 1;
+ pthread_mutex_lock(&queue->m_lock);
+ if (queue->m_size > 0) {
+ flag = 0;
+ }
+ pthread_mutex_unlock(&queue->m_lock);
+
+ return flag;
+}
+
+int mm_qcamera_queue_enqueue(mm_camera_queue_t *queue, void *data)
+{
+ if ( NULL == queue ) {
+ return -1;
+ }
+
+ camera_q_node *node =
+ (camera_q_node *)malloc(sizeof(camera_q_node));
+ if (NULL == node) {
+ LOGE(" No memory for camera_q_node");
+ return 0;
+ }
+
+ memset(node, 0, sizeof(camera_q_node));
+ node->data = data;
+
+ pthread_mutex_lock(&queue->m_lock);
+ cam_list_add_tail_node(&node->list, &queue->m_head.list);
+ queue->m_size++;
+ pthread_mutex_unlock(&queue->m_lock);
+
+ return 1;
+}
+
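+/* Remove one element: bFromHead != 0 dequeues from the head (FIFO order), otherwise from the tail. */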
+void* mm_qcamera_queue_dequeue(mm_camera_queue_t *queue, int bFromHead)
+{
+ if ( NULL == queue ) {
+ return NULL;
+ }
+
+ camera_q_node* node = NULL;
+ void* data = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->m_lock);
+ head = &queue->m_head.list;
+ if (bFromHead) {
+ pos = head->next;
+ } else {
+ pos = head->prev;
+ }
+ if (pos != head) {
+ node = member_of(pos, camera_q_node, list);
+ cam_list_del_node(&node->list);
+ queue->m_size--;
+ }
+ pthread_mutex_unlock(&queue->m_lock);
+
+ if (NULL != node) {
+ data = node->data;
+ free(node);
+ }
+
+ return data;
+}
+
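+/* Drain the queue, invoking the optional release callback before freeing each queued element. */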
+void mm_qcamera_queue_flush(mm_camera_queue_t *queue)
+{
+ camera_q_node* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ if ( NULL == queue ) {
+ return;
+ }
+
+ pthread_mutex_lock(&queue->m_lock);
+ head = &queue->m_head.list;
+ pos = head->next;
+
+ while(pos != head) {
+ node = member_of(pos, camera_q_node, list);
+ pos = pos->next;
+ cam_list_del_node(&node->list);
+ queue->m_size--;
+
+ if (NULL != node->data) {
+ if (queue->m_dataFn) {
+ queue->m_dataFn(node->data, queue->m_userData);
+ }
+ free(node->data);
+ }
+ free(node);
+
+ }
+ queue->m_size = 0;
+ pthread_mutex_unlock(&queue->m_lock);
+}
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
new file mode 100644
index 0000000..4c07f6a
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
@@ -0,0 +1,346 @@
+/*
+Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <fcntl.h>
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static uint32_t rdi_len = 0;
+
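+/** mm_app_rdi_dump_frame
+ *
+ * Writes every plane of the RDI frame to
+ * QCAMERA_DUMP_FRM_LOCATION<name>_<idx>.<ext>, using the globally
+ * cached rdi_len as the per-plane write length.
+ **/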
+static void mm_app_rdi_dump_frame(mm_camera_buf_def_t *frame,
+ char *name,
+ char *ext,
+ uint32_t frame_idx)
+{
+ char file_name[FILENAME_MAX];
+ int file_fd;
+ int i;
+
+ if (frame != NULL) {
+ snprintf(file_name, sizeof(file_name),
+ QCAMERA_DUMP_FRM_LOCATION"%s_%03u.%s", name, frame_idx, ext);
+ file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+ if (file_fd < 0) {
+ LOGE(" cannot open file %s \n", file_name);
+ } else {
+ for (i = 0; i < frame->planes_buf.num_planes; i++) {
+ write(file_fd,
+ (uint8_t *)frame->buffer + frame->planes_buf.planes[i].data_offset,
+ rdi_len);
+ }
+
+ close(file_fd);
+ LOGD(" dump rdi frame %s", file_name);
+ }
+ }
+}
+
+static void mm_app_rdi_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ char file_name[FILENAME_MAX];
+ mm_camera_buf_def_t *frame = bufs->bufs[0];
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+ LOGD(" BEGIN - length=%zu, frame idx = %d stream_id=%d\n",
+ frame->frame_len, frame->frame_idx, frame->stream_id);
+ snprintf(file_name, sizeof(file_name), "RDI_dump_%d", pme->cam->camera_handle);
+ mm_app_rdi_dump_frame(frame, file_name, "raw", frame->frame_idx);
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE(" Failed in RDI Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+
+ LOGD(" END\n");
+}
+
+mm_camera_stream_t * mm_app_add_rdi_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ size_t i;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+ cam_format_t fmt = CAM_FORMAT_MAX;
+ cam_stream_buf_plane_info_t *buf_planes;
+ cam_stream_size_info_t abc;
+ memset(&abc, 0, sizeof(cam_stream_size_info_t));
+
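+ /* Pick a RAW format reported by the sensor: any MIPI Bayer format in
+ * the 8..12bpp range, meta/JPEG 8-bit raw, or 14bpp BGGR. The loop
+ * keeps the last match; if nothing matches, the RDI stream cannot be
+ * created. */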
+ LOGE(" raw_dim w:%d height:%d\n", cam_cap->raw_dim[0].width, cam_cap->raw_dim[0].height);
+ for (i = 0;i < cam_cap->supported_raw_fmt_cnt;i++) {
+ LOGE(" supported_raw_fmts[%zd]=%d\n",
+ i, (int)cam_cap->supported_raw_fmts[i]);
+ if (((CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG <= cam_cap->supported_raw_fmts[i]) &&
+ (CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR >= cam_cap->supported_raw_fmts[i])) ||
+ (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_META_RAW_8BIT) ||
+ (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_JPEG_RAW_8BIT) ||
+ (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR))
+ {
+ fmt = cam_cap->supported_raw_fmts[i];
+ LOGE(" fmt=%d\n", fmt);
+ }
+ }
+
+ if (CAM_FORMAT_MAX == fmt) {
+ LOGE(" rdi format not supported\n");
+ return NULL;
+ }
+
+ abc.num_streams = 1;
+ abc.postprocess_mask[0] = 0;
+ abc.stream_sizes[0].width = cam_cap->raw_dim[0].width;
+ abc.stream_sizes[0].height = cam_cap->raw_dim[0].height;
+ abc.type[0] = CAM_STREAM_TYPE_RAW;
+ abc.buffer_info.min_buffers = num_bufs;
+ abc.buffer_info.max_buffers = num_bufs;
+ abc.is_type[0] = IS_TYPE_NONE;
+
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" meta info command failed\n");
+ }
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE(" add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_RAW;
+ if (num_burst == 0) {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+ stream->s_config.stream_info->num_of_burst = num_burst;
+ }
+ stream->s_config.stream_info->fmt = DEFAULT_RAW_FORMAT;
+ LOGD(" RAW: w: %d, h: %d ",
+ cam_cap->raw_dim[0].width, cam_cap->raw_dim[0].height);
+
+ stream->s_config.stream_info->dim.width = cam_cap->raw_dim[0].width;
+ stream->s_config.stream_info->dim.height = cam_cap->raw_dim[0].height;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config rdi stream err=%d\n", rc);
+ return NULL;
+ }
+
+ buf_planes = &stream->s_config.stream_info->buf_planes;
+ rdi_len = buf_planes->plane_info.mp[0].len;
+ LOGD(" plane_info %dx%d len:%d frame_len:%d\n",
+ buf_planes->plane_info.mp[0].stride, buf_planes->plane_info.mp[0].scanline,
+ buf_planes->plane_info.mp[0].len, buf_planes->plane_info.frame_len);
+
+ return stream;
+}
+
+mm_camera_stream_t * mm_app_add_rdi_snapshot_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE(" add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
+ if (num_burst == 0) {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+ stream->s_config.stream_info->num_of_burst = num_burst;
+ }
+ stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
+ stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+ stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config rdi stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_channel_t * mm_app_add_rdi_channel(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_RDI,
+ NULL,
+ NULL,
+ NULL);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return NULL;
+ }
+
+ stream = mm_app_add_rdi_stream(test_obj,
+ channel,
+ mm_app_rdi_notify_cb,
+ (void *)test_obj,
+ RDI_BUF_NUM,
+ num_burst);
+ if (NULL == stream) {
+ LOGE(" add stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return NULL;
+ }
+
+ LOGD(" channel=%d stream=%d\n", channel->ch_id, stream->s_id);
+ return channel;
+}
+
+int mm_app_stop_and_del_rdi_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ uint8_t i;
+ cam_stream_size_info_t abc;
+ memset(&abc, 0, sizeof(cam_stream_size_info_t));
+
+ rc = mm_app_stop_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop RDI failed rc=%d\n", rc);
+ }
+
+ if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
+ for (i = 0; i < channel->num_streams; i++) {
+ stream = &channel->streams[i];
+ rc = mm_app_del_stream(test_obj, channel, stream);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("del stream(%d) failed rc=%d\n", i, rc);
+ }
+ }
+ } else {
+ LOGE(" num_streams = %d. Should not be more than %d\n",
+ channel->num_streams, MAX_STREAM_NUM_IN_BUNDLE);
+ }
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" meta info command failed\n");
+ }
+ rc = mm_app_del_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("delete channel failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel = NULL;
+
+ channel = mm_app_add_rdi_channel(test_obj, num_burst);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_start_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start rdi failed rc=%d\n", rc);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ return rc;
+}
+
+int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_channel_t *channel =
+ mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_RDI);
+
+ rc = mm_app_stop_and_del_rdi_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop RDI failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c
new file mode 100644
index 0000000..4ed4c5d
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c
@@ -0,0 +1,349 @@
+/*
+Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static void mm_app_reprocess_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ mm_camera_buf_def_t *frame = bufs->bufs[0];
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *m_stream = NULL;
+ mm_camera_buf_def_t *m_frame = NULL;
+ mm_camera_super_buf_t *src_frame;
+ int i = 0;
+ int rc = 0;
+
+ LOGE(" BEGIN - length=%zu, frame idx = %d\n",
+ frame->frame_len, frame->frame_idx);
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE(" Wrong channel id (%d)", bufs->ch_id);
+ return;
+ }
+
+ // We have only one stream and buffer
+ // in the reprocess channel.
+ m_stream = &channel->streams[0];
+ m_frame = bufs->bufs[0];
+
+ if ( pme->encodeJpeg ) {
+ pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+ if ( NULL == pme->jpeg_buf.buf.buffer ) {
+ LOGE(" error allocating jpeg output buffer");
+ goto exit;
+ }
+
+ pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+ /* create a new jpeg encoding session */
+ rc = createEncodingSession(pme, m_stream, m_frame);
+ if (0 != rc) {
+ LOGE(" error creating jpeg session");
+ free(pme->jpeg_buf.buf.buffer);
+ goto exit;
+ }
+
+ /* start jpeg encoding job */
+ LOGE("Encoding reprocessed frame!!");
+ rc = encodeData(pme, bufs, m_stream);
+ pme->encodeJpeg = 0;
+ } else {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE(" Failed in Reprocess Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+
+exit:
+
+ // Release the source frame that was queued for reprocessing
+ src_frame = ( mm_camera_super_buf_t * ) mm_qcamera_queue_dequeue(&pme->pp_frames, 1);
+ if ( NULL != src_frame ) {
+ mm_app_release_ppinput((void *) src_frame, (void *) pme);
+ free(src_frame);
+ }
+
+ LOGE(" END\n");
+}
+
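+/** mm_app_add_reprocess_stream_from_source
+ *
+ * Adds an offline-processing stream that inherits format and
+ * dimensions from the source stream and links back to it through
+ * reprocess_config (online reprocess of the source stream's
+ * server-side stream id), applying the requested pp_config features.
+ **/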
+mm_camera_stream_t * mm_app_add_reprocess_stream_from_source(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_stream_t *source,
+ mm_camera_buf_notify_t stream_cb,
+ cam_pp_feature_config_t pp_config,
+ void *userdata,
+ uint8_t num_bufs)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = NULL;
+ cam_stream_info_t *source_stream_info;
+
+ if ( ( NULL == test_obj ) ||
+ ( NULL == channel ) ||
+ ( NULL == source ) ) {
+ LOGE(" Invalid input\n");
+ return NULL;
+ }
+
+ cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE(" add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ source_stream_info = (cam_stream_info_t *) source->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ stream->s_config.stream_info->fmt = source_stream_info->fmt;
+ stream->s_config.stream_info->dim = source_stream_info->dim;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+
+ stream->s_config.stream_info->reprocess_config.pp_type = CAM_ONLINE_REPROCESS_TYPE;
+ stream->s_config.stream_info->reprocess_config.online.input_stream_id = source->s_config.stream_info->stream_svr_id;
+ stream->s_config.stream_info->reprocess_config.online.input_stream_type = source->s_config.stream_info->stream_type;
+ stream->s_config.stream_info->reprocess_config.pp_feature_config = pp_config;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_channel_t * mm_app_add_reprocess_channel(mm_camera_test_obj_t *test_obj,
+ mm_camera_stream_t *source_stream)
+{
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+
+ if ( NULL == source_stream ) {
+ LOGE(" add reprocess stream failed\n");
+ return NULL;
+ }
+
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_REPROCESS,
+ NULL,
+ NULL,
+ NULL);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return NULL;
+ }
+
+ // pp feature config
+ cam_pp_feature_config_t pp_config;
+ memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+ cam_capability_t *caps = ( cam_capability_t * ) ( test_obj->cap_buf.buf.buffer );
+ if (caps->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SHARPNESS) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+ pp_config.sharpness = test_obj->reproc_sharpness;
+ }
+
+ if (test_obj->reproc_wnr.denoise_enable) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+ pp_config.denoise2d = test_obj->reproc_wnr;
+ }
+
+ if (test_obj->enable_CAC) {
+ pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
+ }
+
+ uint8_t minStreamBufNum = source_stream->num_of_bufs;
+ stream = mm_app_add_reprocess_stream_from_source(test_obj,
+ channel,
+ source_stream,
+ mm_app_reprocess_notify_cb,
+ pp_config,
+ (void *)test_obj,
+ minStreamBufNum);
+ if (NULL == stream) {
+ LOGE(" add reprocess stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return NULL;
+ }
+ test_obj->reproc_stream = stream;
+
+ return channel;
+}
+
+int mm_app_start_reprocess(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *r_ch = NULL;
+
+ mm_camera_queue_init(&test_obj->pp_frames,
+ mm_app_release_ppinput,
+ ( void * ) test_obj);
+
+ r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+ if (NULL == r_ch) {
+ LOGE(" No initialized reprocess channel\n");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_start_channel(test_obj, r_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start reprocess failed rc=%d\n", rc);
+ mm_app_del_channel(test_obj, r_ch);
+ return rc;
+ }
+
+ return rc;
+}
+
+int mm_app_stop_reprocess(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *r_ch = NULL;
+
+ r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+ if (NULL == r_ch) {
+ LOGE(" No initialized reprocess channel\n");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_stop_and_del_channel(test_obj, r_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ mm_qcamera_queue_release(&test_obj->pp_frames);
+ test_obj->reproc_stream = NULL;
+
+ return rc;
+}
+
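+/** mm_app_do_reprocess
+ *
+ * Queues a copy of the source super buffer on pp_frames (it is
+ * released later from the reprocess callback), then issues a
+ * CAM_STREAM_PARAM_TYPE_DO_REPROCESS stream parameter referencing the
+ * input buffer index and, when available, the matching metadata
+ * buffer from src_meta.
+ **/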
+int mm_app_do_reprocess(mm_camera_test_obj_t *test_obj,
+ mm_camera_buf_def_t *frame,
+ uint32_t meta_idx,
+ mm_camera_super_buf_t *super_buf,
+ mm_camera_stream_t *src_meta)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *r_ch = NULL;
+ mm_camera_super_buf_t *src_buf = NULL;
+
+ if ( ( NULL == test_obj ) ||
+ ( NULL == frame ) ||
+ ( NULL == super_buf )) {
+ LOGE(" Invalid input\n");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ if ( NULL == test_obj->reproc_stream ) {
+ LOGE(" No reprocess stream\n");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+ if (NULL == r_ch) {
+ LOGE(" No reprocess channel\n");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ src_buf = ( mm_camera_super_buf_t * ) malloc(sizeof(mm_camera_super_buf_t));
+ if ( NULL == src_buf ) {
+ LOGE(" No resources for src frame rc=%d\n", rc);
+ return -1;
+ }
+ memcpy(src_buf, super_buf, sizeof(mm_camera_super_buf_t));
+ mm_qcamera_queue_enqueue(&test_obj->pp_frames, src_buf);
+
+ cam_stream_parm_buffer_t param;
+ memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+ param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+ param.reprocess.buf_index = frame->buf_idx;
+ param.reprocess.frame_idx = frame->frame_idx;
+ if (src_meta != NULL) {
+ param.reprocess.meta_present = 1;
+ param.reprocess.meta_stream_handle = src_meta->s_config.stream_info->stream_svr_id;
+ param.reprocess.meta_buf_index = meta_idx;
+ } else {
+ LOGE(" No metadata source stream rc=%d\n", rc);
+ }
+
+ test_obj->reproc_stream->s_config.stream_info->parm_buf = param;
+ rc = test_obj->cam->ops->set_stream_parms(test_obj->cam->camera_handle,
+ r_ch->ch_id,
+ test_obj->reproc_stream->s_id,
+ &test_obj->reproc_stream->s_config.stream_info->parm_buf);
+
+ return rc;
+}
+
+void mm_app_release_ppinput(void *data, void *user_data)
+{
+ uint32_t i = 0;
+ mm_camera_super_buf_t *recvd_frame = ( mm_camera_super_buf_t * ) data;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+ for ( i = 0 ; i < recvd_frame->num_bufs ; i++) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,
+ recvd_frame->ch_id,
+ recvd_frame->bufs[i])) {
+ LOGE(" Failed in Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *) recvd_frame->bufs[i]->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+}
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
new file mode 100644
index 0000000..b56e6b4
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
@@ -0,0 +1,711 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+/* This callback is received once the complete JPEG encoding is done */
+static void jpeg_encode_cb(jpeg_job_status_t status,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_buf,
+ void *userData)
+{
+ uint32_t i = 0;
+ mm_camera_test_obj_t *pme = NULL;
+ LOGD(" BEGIN\n");
+
+ pme = (mm_camera_test_obj_t *)userData;
+ if (pme->jpeg_hdl != client_hdl ||
+ jobId != pme->current_job_id ||
+ !pme->current_job_frames) {
+ LOGE(" NULL current job frames or not matching job ID (%d, %d)",
+ jobId, pme->current_job_id);
+ return;
+ }
+
+ /* dump jpeg img */
+ LOGE(" job %d, status=%d", jobId, status);
+ if (status == JPEG_JOB_STATUS_DONE && p_buf != NULL) {
+ mm_app_dump_jpeg_frame(p_buf->buf_vaddr, p_buf->buf_filled_len, "jpeg", "jpg", jobId);
+ }
+
+ /* buf done current encoding frames */
+ pme->current_job_id = 0;
+ for (i = 0; i < pme->current_job_frames->num_bufs; i++) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->current_job_frames->camera_handle,
+ pme->current_job_frames->ch_id,
+ pme->current_job_frames->bufs[i])) {
+ LOGE(" Failed in Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *) pme->current_job_frames->bufs[i]->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+
+ free(pme->jpeg_buf.buf.buffer);
+ free(pme->current_job_frames);
+ pme->current_job_frames = NULL;
+
+ /* signal snapshot is done */
+ mm_camera_app_done();
+}
+
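+/* Starts a JPEG encode job for the main snapshot stream: source and
+ * destination dimensions both come from the stream config, the
+ * thumbnail is scaled to the default preview size and rotation is
+ * forced to 270 degrees for back sensors. The super buffer is kept in
+ * current_job_frames until jpeg_encode_cb returns it. */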
+int encodeData(mm_camera_test_obj_t *test_obj, mm_camera_super_buf_t* recvd_frame,
+ mm_camera_stream_t *m_stream)
+{
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ int rc = -MM_CAMERA_E_GENERAL;
+ mm_jpeg_job_t job;
+
+ /* remember current frames being encoded */
+ test_obj->current_job_frames =
+ (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+ if (!test_obj->current_job_frames) {
+ LOGE(" No memory for current_job_frames");
+ return rc;
+ }
+ *(test_obj->current_job_frames) = *recvd_frame;
+
+ memset(&job, 0, sizeof(job));
+ job.job_type = JPEG_JOB_TYPE_ENCODE;
+ job.encode_job.session_id = test_obj->current_jpeg_sess_id;
+
+ // TODO: Rotation should be set according to
+ // sensor&device orientation
+ job.encode_job.rotation = 0;
+ if (cam_cap->position == CAM_POSITION_BACK) {
+ job.encode_job.rotation = 270;
+ }
+
+ /* fill in main src img encode param */
+ job.encode_job.main_dim.src_dim = m_stream->s_config.stream_info->dim;
+ job.encode_job.main_dim.dst_dim = m_stream->s_config.stream_info->dim;
+ job.encode_job.src_index = 0;
+
+ job.encode_job.thumb_dim.src_dim = m_stream->s_config.stream_info->dim;
+ job.encode_job.thumb_dim.dst_dim.width = DEFAULT_PREVIEW_WIDTH;
+ job.encode_job.thumb_dim.dst_dim.height = DEFAULT_PREVIEW_HEIGHT;
+
+ /* fill in sink img param */
+ job.encode_job.dst_index = 0;
+
+ if (test_obj->metadata != NULL) {
+ job.encode_job.p_metadata = test_obj->metadata;
+ } else {
+ LOGE(" Metadata null, not set for jpeg encoding");
+ }
+
+ rc = test_obj->jpeg_ops.start_job(&job, &test_obj->current_job_id);
+ if ( 0 != rc ) {
+ free(test_obj->current_job_frames);
+ test_obj->current_job_frames = NULL;
+ }
+
+ return rc;
+}
+
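+/* Creates a single-buffer JPEG session: one YUV source buffer taken
+ * from the snapshot frame and one destination buffer taken from
+ * jpeg_buf, both at the main stream dimensions, quality 85, no
+ * thumbnail encoding. */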
+int createEncodingSession(mm_camera_test_obj_t *test_obj,
+ mm_camera_stream_t *m_stream,
+ mm_camera_buf_def_t *m_frame)
+{
+ mm_jpeg_encode_params_t encode_param;
+
+ memset(&encode_param, 0, sizeof(mm_jpeg_encode_params_t));
+ encode_param.jpeg_cb = jpeg_encode_cb;
+ encode_param.userdata = (void*)test_obj;
+ encode_param.encode_thumbnail = 0;
+ encode_param.quality = 85;
+ encode_param.color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+ encode_param.thumb_color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+
+ /* fill in main src img encode param */
+ encode_param.num_src_bufs = 1;
+ encode_param.src_main_buf[0].index = 0;
+ encode_param.src_main_buf[0].buf_size = m_frame->frame_len;
+ encode_param.src_main_buf[0].buf_vaddr = (uint8_t *)m_frame->buffer;
+ encode_param.src_main_buf[0].fd = m_frame->fd;
+ encode_param.src_main_buf[0].format = MM_JPEG_FMT_YUV;
+ encode_param.src_main_buf[0].offset = m_stream->offset;
+
+ /* fill in sink img param */
+ encode_param.num_dst_bufs = 1;
+ encode_param.dest_buf[0].index = 0;
+ encode_param.dest_buf[0].buf_size = test_obj->jpeg_buf.buf.frame_len;
+ encode_param.dest_buf[0].buf_vaddr = (uint8_t *)test_obj->jpeg_buf.buf.buffer;
+ encode_param.dest_buf[0].fd = test_obj->jpeg_buf.buf.fd;
+ encode_param.dest_buf[0].format = MM_JPEG_FMT_YUV;
+
+ /* main dimension */
+ encode_param.main_dim.src_dim = m_stream->s_config.stream_info->dim;
+ encode_param.main_dim.dst_dim = m_stream->s_config.stream_info->dim;
+
+ return test_obj->jpeg_ops.create_session(test_obj->jpeg_hdl,
+ &encode_param,
+ &test_obj->current_jpeg_sess_id);
+}
+
+/** mm_app_snapshot_metadata_notify_cb
+ * @bufs: Pointer to super buffer
+ * @user_data: Pointer to user data
+ *
+ * Caches the latest metadata buffer for later JPEG encoding and
+ * signals the app once an autofocus cycle finishes.
+ **/
+__unused
+static void mm_app_snapshot_metadata_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ uint32_t i = 0;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_buf_def_t *frame;
+ metadata_buffer_t *pMetadata;
+
+ if (NULL == bufs || NULL == user_data) {
+ LOGE(" bufs or user_data are not valid ");
+ return;
+ }
+ frame = bufs->bufs[0];
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+
+ if (NULL == channel) {
+ LOGE(" Channel object is null");
+ return;
+ }
+
+ /* find meta stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+
+ if (NULL == p_stream) {
+ LOGE(" cannot find metadata stream");
+ return;
+ }
+
+ /* find meta frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ frame = bufs->bufs[i];
+ break;
+ }
+ }
+
+ if (!pme->metadata) {
+ /* The app will free the metadata, we don't need to bother here */
+ pme->metadata = malloc(sizeof(metadata_buffer_t));
+ if (NULL == pme->metadata) {
+ LOGE(" malloc failed");
+ return;
+ }
+ }
+
+ memcpy(pme->metadata , frame->buffer, sizeof(metadata_buffer_t));
+
+ pMetadata = (metadata_buffer_t *)frame->buffer;
+
+ IF_META_AVAILABLE(cam_auto_focus_data_t, focus_data,
+ CAM_INTF_META_AUTOFOCUS_DATA, pMetadata) {
+ if (focus_data->focus_state == CAM_AF_STATE_FOCUSED_LOCKED) {
+ LOGE(" AutoFocus Done Call Back Received\n");
+ mm_camera_app_done();
+ } else if (focus_data->focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED) {
+ LOGE(" AutoFocus failed\n");
+ mm_camera_app_done();
+ }
+ }
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE(" Failed in Preview Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+}
+
+static void mm_app_snapshot_notify_cb_raw(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+
+ int rc;
+ uint32_t i = 0;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *m_stream = NULL;
+ mm_camera_buf_def_t *m_frame = NULL;
+
+ LOGD(" BEGIN\n");
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE(" Wrong channel id (%d)", bufs->ch_id);
+ rc = -1;
+ goto EXIT;
+ }
+
+ /* find snapshot stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_RAW) {
+ m_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == m_stream) {
+ LOGE(" cannot find snapshot stream");
+ rc = -1;
+ goto EXIT;
+ }
+
+ /* find snapshot frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+ m_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (NULL == m_frame) {
+ LOGE(" main frame is NULL");
+ rc = -1;
+ goto EXIT;
+ }
+
+ mm_app_dump_frame(m_frame, "main", "raw", m_frame->frame_idx);
+
+EXIT:
+ for (i=0; i<bufs->num_bufs; i++) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ bufs->bufs[i])) {
+ LOGE(" Failed in Qbuf\n");
+ }
+ }
+
+ mm_camera_app_done();
+
+ LOGD(" END\n");
+}
+
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+
+ int rc = 0;
+ uint32_t i = 0;
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *p_stream = NULL;
+ mm_camera_stream_t *m_stream = NULL;
+ mm_camera_buf_def_t *p_frame = NULL;
+ mm_camera_buf_def_t *m_frame = NULL;
+
+ /* find channel */
+ for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+ if (pme->channels[i].ch_id == bufs->ch_id) {
+ channel = &pme->channels[i];
+ break;
+ }
+ }
+ if (NULL == channel) {
+ LOGE(" Wrong channel id (%d)", bufs->ch_id);
+ rc = -1;
+ goto error;
+ }
+
+ /* find snapshot stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+ m_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL == m_stream) {
+ LOGE(" cannot find snapshot stream");
+ rc = -1;
+ goto error;
+ }
+
+ /* find snapshot frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+ m_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (NULL == m_frame) {
+ LOGE(" main frame is NULL");
+ rc = -1;
+ goto error;
+ }
+
+ mm_app_dump_frame(m_frame, "main", "yuv", m_frame->frame_idx);
+
+ /* find postview stream */
+ for (i = 0; i < channel->num_streams; i++) {
+ if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_POSTVIEW) {
+ p_stream = &channel->streams[i];
+ break;
+ }
+ }
+ if (NULL != p_stream) {
+ /* find preview frame */
+ for (i = 0; i < bufs->num_bufs; i++) {
+ if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+ p_frame = bufs->bufs[i];
+ break;
+ }
+ }
+ if (NULL != p_frame) {
+ mm_app_dump_frame(p_frame, "postview", "yuv", p_frame->frame_idx);
+ }
+ }
+
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+ ION_IOC_CLEAN_INV_CACHES);
+
+ pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+ if ( NULL == pme->jpeg_buf.buf.buffer ) {
+ LOGE(" error allocating jpeg output buffer");
+ rc = -1;
+ goto error;
+ }
+
+ pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+ /* create a new jpeg encoding session */
+ rc = createEncodingSession(pme, m_stream, m_frame);
+ if (0 != rc) {
+ LOGE(" error creating jpeg session");
+ free(pme->jpeg_buf.buf.buffer);
+ goto error;
+ }
+
+ /* start jpeg encoding job */
+ rc = encodeData(pme, bufs, m_stream);
+ if (0 != rc) {
+ LOGE(" error creating jpeg session");
+ free(pme->jpeg_buf.buf.buffer);
+ goto error;
+ }
+
+error:
+ /* buf done rcvd frames in error case */
+ if ( 0 != rc ) {
+ for (i=0; i<bufs->num_bufs; i++) {
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ bufs->bufs[i])) {
+ LOGE(" Failed in Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)bufs->bufs[i]->mem_info,
+ ION_IOC_INV_CACHES);
+ }
+ }
+
+ LOGD(" END\n");
+}
+
+mm_camera_channel_t * mm_app_add_snapshot_channel(mm_camera_test_obj_t *test_obj)
+{
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_SNAPSHOT,
+ NULL,
+ NULL,
+ NULL);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return NULL;
+ }
+
+ stream = mm_app_add_snapshot_stream(test_obj,
+ channel,
+ mm_app_snapshot_notify_cb,
+ (void *)test_obj,
+ 1,
+ 1);
+ if (NULL == stream) {
+ LOGE(" add snapshot stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return NULL;
+ }
+
+ return channel;
+}
+
+mm_camera_stream_t * mm_app_add_postview_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs,
+ uint8_t num_burst)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE(" add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_POSTVIEW;
+ if (num_burst == 0) {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ } else {
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+ stream->s_config.stream_info->num_of_burst = num_burst;
+ }
+ stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+ stream->s_config.stream_info->dim.width = DEFAULT_PREVIEW_WIDTH;
+ stream->s_config.stream_info->dim.height = DEFAULT_PREVIEW_HEIGHT;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config postview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+int mm_app_start_capture_raw(mm_camera_test_obj_t *test_obj, uint8_t num_snapshots)
+{
+ int32_t rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *s_main = NULL;
+ mm_camera_channel_attr_t attr;
+
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.max_unmatched_frames = 3;
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_CAPTURE,
+ &attr,
+ mm_app_snapshot_notify_cb_raw,
+ test_obj);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ test_obj->buffer_format = DEFAULT_RAW_FORMAT;
+ s_main = mm_app_add_raw_stream(test_obj,
+ channel,
+ mm_app_snapshot_notify_cb_raw,
+ test_obj,
+ num_snapshots,
+ num_snapshots);
+ if (NULL == s_main) {
+ LOGE(" add main snapshot stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_start_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start zsl failed rc=%d\n", rc);
+ mm_app_del_stream(test_obj, channel, s_main);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ return rc;
+}
+
+int mm_app_stop_capture_raw(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *ch = NULL;
+ int i;
+ cam_stream_size_info_t abc;
+ memset(&abc, 0, sizeof(cam_stream_size_info_t));
+
+ ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_CAPTURE);
+
+ rc = mm_app_stop_channel(test_obj, ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("stop recording failed rc=%d\n", rc);
+ }
+
+ for ( i = 0 ; i < ch->num_streams ; i++ ) {
+ mm_app_del_stream(test_obj, ch, &ch->streams[i]);
+ }
+ rc = setmetainfoCommand(test_obj, &abc);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" meta info command failed\n");
+ }
+ mm_app_del_channel(test_obj, ch);
+
+ return rc;
+}
+
+int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+ uint8_t num_snapshots)
+{
+ int32_t rc = MM_CAMERA_OK;
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *s_main = NULL;
+ mm_camera_stream_t *s_post = NULL;
+ mm_camera_channel_attr_t attr;
+ memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ attr.max_unmatched_frames = 3;
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_CAPTURE,
+ &attr,
+ mm_app_snapshot_notify_cb,
+ test_obj);
+ if (NULL == channel) {
+ LOGE(" add channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ s_main = mm_app_add_snapshot_stream(test_obj,
+ channel,
+ mm_app_snapshot_notify_cb,
+ (void *)test_obj,
+ CAPTURE_BUF_NUM,
+ num_snapshots);
+ if (NULL == s_main) {
+ LOGE(" add main snapshot stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ s_post = mm_app_add_postview_stream(test_obj,
+ channel,
+ NULL,
+ NULL,
+ CAPTURE_BUF_NUM,
+ num_snapshots);
+ if (NULL == s_post) {
+ LOGE(" add postview stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_start_channel(test_obj, channel);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start zsl failed rc=%d\n", rc);
+ mm_app_del_stream(test_obj, channel, s_main);
+ mm_app_del_channel(test_obj, channel);
+ return rc;
+ }
+
+ return rc;
+}
+
+int mm_app_stop_capture(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *ch = NULL;
+
+ ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_CAPTURE);
+
+ rc = mm_app_stop_and_del_channel(test_obj, ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("stop capture channel failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
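+/* Capture sequence: stop preview, start a capture channel for one
+ * snapshot (six in burst mode), block in mm_camera_app_wait() until
+ * every snapshot callback signals completion, tear the capture
+ * channel down and finally restart preview. */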
+int mm_app_take_picture(mm_camera_test_obj_t *test_obj, uint8_t is_burst_mode)
+{
+ LOGH("\nEnter mm_app_take_picture!!\n");
+ int rc = MM_CAMERA_OK;
+ uint8_t num_snapshot = 1;
+ int num_rcvd_snapshot = 0;
+
+ if (is_burst_mode)
+ num_snapshot = 6;
+
+ //stop preview before starting capture.
+ rc = mm_app_stop_preview(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" stop preview failed before capture!!, err=%d\n", rc);
+ return rc;
+ }
+
+ rc = mm_app_start_capture(test_obj, num_snapshot);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_capture(), err=%d\n", rc);
+ return rc;
+ }
+ while (num_rcvd_snapshot < num_snapshot) {
+ LOGH("\nWaiting mm_camera_app_wait !!\n");
+ mm_camera_app_wait();
+ num_rcvd_snapshot++;
+ }
+ rc = mm_app_stop_capture(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_capture(), err=%d\n", rc);
+ return rc;
+ }
+ //start preview after capture.
+ rc = mm_app_start_preview(test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" start preview failed after capture!!, err=%d\n",rc);
+ }
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
new file mode 100644
index 0000000..7ab8db8
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
@@ -0,0 +1,879 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <errno.h>
+
+// Camera dependencies
+#include "mm_qcamera_socket.h"
+#include "mm_qcamera_commands.h"
+#include "mm_qcamera_dbg.h"
+
+#define IP_ADDR "127.0.0.1"
+#define TUNING_CHROMATIX_PORT 55555
+#define TUNING_PREVIEW_PORT 55556
+
+#define CURRENT_COMMAND_ACK_SUCCESS 1
+#define CURRENT_COMMAND_ACK_FAILURE 2
+
+pthread_t eztune_thread_id;
+
+static ssize_t tuneserver_send_command_rsp(tuningserver_t *tsctrl,
+ char *send_buf, uint32_t send_len)
+{
+ ssize_t rc;
+
+ /* send ack back to client upon req */
+ if (send_len <= 0) {
+ LOGE("Invalid send len \n");
+ return -1;
+ }
+ if (send_buf == NULL) {
+ LOGE("Invalid send buf \n");
+ return -1;
+ }
+
+ rc = send(tsctrl->clientsocket_id, send_buf, send_len, 0);
+ if (rc < 0) {
+ LOGE("RSP send returns error %s\n", strerror(errno));
+ } else {
+ rc = 0;
+ }
+
+ if (send_buf != NULL) {
+ free(send_buf);
+ send_buf = NULL;
+ }
+ return rc;
+}
+
+static void release_eztune_prevcmd_rsp(eztune_prevcmd_rsp *pHead)
+{
+ if (pHead != NULL ) {
+ release_eztune_prevcmd_rsp((eztune_prevcmd_rsp *)pHead->next);
+ free(pHead);
+ }
+}
+
+static ssize_t tuneserver_ack(uint16_t a, uint32_t b, tuningserver_t *tsctrl)
+{
+ ssize_t rc;
+ char ack_1[6];
+ /*Ack the command here*/
+ memcpy(ack_1, &a, 2);
+ memcpy(ack_1+2, &b, 4);
+ /* send echo back to client upon accept */
+ rc = send(tsctrl->clientsocket_id, &ack_1, sizeof(ack_1), 0);
+ if (rc < 0) {
+ LOGE(" eztune_server_run: send returns error %s\n",
+ strerror(errno));
+ return rc;
+ } else if (rc < (int32_t)sizeof(ack_1)) {
+ /*Shouldn't hit this for packets <1K; need to re-send if we do*/
+ }
+ return 0;
+}
+
+static ssize_t tuneserver_send_command_ack( uint8_t ack,
+ tuningserver_t *tsctrl)
+{
+ ssize_t rc;
+ /* send ack back to client upon req */
+ rc = send(tsctrl->clientsocket_id, &ack, sizeof(ack), 0);
+ if (rc < 0) {
+ LOGE("ACK send returns error %s\n", strerror(errno));
+ return rc;
+ }
+ return 0;
+}
+
+/** tuneserver_process_command
+ * @tsctrl: the server control object
+ *
+ * Processes the command that the client sent
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+static int32_t tuneserver_process_command(tuningserver_t *tsctrl,
+ char *send_buf, uint32_t send_len)
+{
+ tuneserver_protocol_t *p = tsctrl->proto;
+ int result = 0;
+
+ LOGD(" Current command is %d\n", p->current_cmd);
+ switch (p->current_cmd) {
+ case TUNESERVER_GET_LIST:
+ if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+ LOGE(" Ack Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ result = tuneserver_process_get_list_cmd(tsctrl, p->recv_buf,
+ send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+ LOGE(" RSP Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ break;
+
+ case TUNESERVER_GET_PARMS:
+ if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+ LOGE(" Ack Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ result = tuneserver_process_get_params_cmd(tsctrl, p->recv_buf,
+ send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+ LOGE(" RSP Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ break;
+
+ case TUNESERVER_SET_PARMS:
+ if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+ LOGE(" Ack Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ result = tuneserver_process_set_params_cmd(tsctrl, p->recv_buf,
+ send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+ LOGE(" RSP Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ break;
+
+ case TUNESERVER_MISC_CMDS: {
+ if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+ LOGE(" Ack Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ result = tuneserver_process_misc_cmd(tsctrl, p->recv_buf,
+ send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+ LOGE(" RSP Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ break;
+ }
+
+ default:
+ if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+ LOGE(" Ack Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ LOGE(" p->current_cmd: default\n");
+ result = -1;
+ break;
+ }
+
+ return result;
+}
+
+/** tuneserver_process_client_message
+ * @recv_buffer: received message from the client
+ * @tsctrl: the server control object
+ *
+ * Processes the message from client and prepares for next
+ * message.
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
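+/* Receive sequence driven by next_recv_code:
+ * TUNESERVER_RECV_COMMAND (2 bytes) -> TUNESERVER_RECV_PAYLOAD_SIZE
+ * (4 bytes) -> TUNESERVER_RECV_PAYLOAD (payload bytes, skipped when
+ * the size is 0) -> TUNESERVER_RECV_RESPONSE (4 byte response length,
+ * command dispatched here) -> back to TUNESERVER_RECV_COMMAND. */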
+static int32_t tuneserver_process_client_message(void *recv_buffer,
+ tuningserver_t *tsctrl)
+{
+ int rc = 0;
+ tuneserver_protocol_t *p = tsctrl->proto;
+
+ switch (tsctrl->proto->next_recv_code) {
+ case TUNESERVER_RECV_COMMAND:
+ p->current_cmd = *(uint16_t *)recv_buffer;
+ p->next_recv_code = TUNESERVER_RECV_PAYLOAD_SIZE;
+ p->next_recv_len = sizeof(uint32_t);
+ break;
+
+ case TUNESERVER_RECV_PAYLOAD_SIZE:
+ p->next_recv_code = TUNESERVER_RECV_PAYLOAD;
+ p->next_recv_len = *(uint32_t *)recv_buffer;
+ p->recv_len = p->next_recv_len;
+ if (p->next_recv_len > TUNESERVER_MAX_RECV)
+ return -1;
+ if (p->next_recv_len == 0) {
+ p->next_recv_code = TUNESERVER_RECV_RESPONSE;
+ p->next_recv_len = sizeof(uint32_t);
+ }
+ break;
+
+ case TUNESERVER_RECV_PAYLOAD:
+ p->recv_buf = malloc(p->next_recv_len);
+ if (!p->recv_buf) {
+ LOGE("Error allocating memory for recv_buf %s\n",
+ strerror(errno));
+ return -1;
+ }
+ memcpy(p->recv_buf, recv_buffer, p->next_recv_len);
+ p->next_recv_code = TUNESERVER_RECV_RESPONSE;
+ p->next_recv_len = sizeof(uint32_t);
+ /*Process current command at this point*/
+ break;
+
+ case TUNESERVER_RECV_RESPONSE:
+ p->next_recv_code = TUNESERVER_RECV_COMMAND;
+ p->next_recv_len = 2;
+ p->send_len = *(uint32_t *)recv_buffer;
+ p->send_buf = (char *)calloc(p->send_len, sizeof(char));
+ if (!p->send_buf) {
+ LOGE("Error allocating memory for send_buf %s\n",
+ strerror(errno));
+ return -1;
+ }
+ rc = tuneserver_process_command(tsctrl, p->send_buf, p->send_len);
+ free(p->recv_buf);
+ p->recv_buf = NULL;
+ p->recv_len = 0;
+ break;
+
+ default:
+ LOGE(" p->next_recv_code: default\n");
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
+
+/** tuneserver_ack_onaccept_initprotocol
+ * @tsctrl: the server control object
+ *
+ * Acks a connection from the client and sets up the
+ * protocol object to start receiving commands.
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+static ssize_t tuneserver_ack_onaccept_initprotocol(tuningserver_t *tsctrl)
+{
+ ssize_t rc = 0;
+ uint32_t ack_status;
+
+ LOGE("starts\n");
+/*
+ if(tsctrl->camera_running) {
+ ack_status = 1;
+ } else {
+ ack_status = 2;
+ }
+*/
+ ack_status = 1;
+
+ rc = tuneserver_ack(1, ack_status, tsctrl);
+
+ tsctrl->proto = malloc(sizeof(tuneserver_protocol_t));
+ if (!tsctrl->proto) {
+ LOGE(" malloc returns NULL with error %s\n", strerror(errno));
+ return -1;
+ }
+
+ tsctrl->proto->current_cmd = 0xFFFF;
+ tsctrl->proto->next_recv_code = TUNESERVER_RECV_COMMAND;
+ tsctrl->proto->next_recv_len = 2;
+ tsctrl->proto->recv_buf = NULL;
+ tsctrl->proto->send_buf = NULL;
+
+ LOGD("X\n");
+
+ return rc;
+}
+
+/** tuneserver_check_status
+ * @tsctrl: the server control object
+ *
+ * Checks if camera is running and stops it.
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
+#if 0
+static void tuneserver_check_status(tuningserver_t *tsctrl)
+{
+ if (tsctrl->camera_running == 1) {
+ /*TODO: Stop camera here*/
+ tuneserver_stop_cam(&tsctrl->lib_handle);
+ }
+ tsctrl->camera_running = 0;
+
+ tuneserver_close_cam(&tsctrl->lib_handle);
+}
+#endif
+
+static ssize_t prevserver_send_command_rsp(tuningserver_t *tsctrl,
+ char *send_buf, uint32_t send_len)
+{
+ ssize_t rc;
+
+ /* send ack back to client upon req */
+ if (send_len <= 0) {
+ LOGE("Invalid send len \n");
+ return -1;
+ }
+ if (send_buf == NULL) {
+ LOGE("Invalid send buf \n");
+ return -1;
+ }
+
+ rc = send(tsctrl->pr_clientsocket_id, send_buf, send_len, 0);
+ if (rc < 0) {
+ LOGE("RSP send returns error %s\n", strerror(errno));
+ } else {
+ rc = 0;
+ }
+ if (send_buf != NULL) {
+ free(send_buf);
+ send_buf = NULL;
+ }
+ return rc;
+}
+
+static void prevserver_init_protocol(tuningserver_t *tsctrl)
+{
+ tsctrl->pr_proto = malloc(sizeof(prserver_protocol_t));
+ if (!tsctrl->pr_proto) {
+ LOGE(" malloc returns NULL with error %s\n",
+ strerror(errno));
+ return;
+ }
+
+ tsctrl->pr_proto->current_cmd = 0xFFFF;
+ tsctrl->pr_proto->next_recv_code = TUNE_PREV_RECV_COMMAND;
+ tsctrl->pr_proto->next_recv_len = 2;
+}
+
+static int32_t prevserver_process_command(
+ tuningserver_t *tsctrl, char **send_buf, uint32_t *send_len)
+{
+ prserver_protocol_t *p = tsctrl->pr_proto;
+ int result = 0;
+ eztune_prevcmd_rsp *rsp_ptr=NULL, *rspn_ptr=NULL, *head_ptr=NULL;
+
+ LOGD(" Current command is %d\n", p->current_cmd);
+ switch (p->current_cmd) {
+ case TUNE_PREV_GET_INFO:
+ result = tuneserver_preview_getinfo(tsctrl, send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n",
+ p->current_cmd);
+ return -1;
+ }
+ rsp_ptr = (eztune_prevcmd_rsp *)*send_buf;
+ if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+ LOGE(" RSP ptr is NULL %d\n", p->current_cmd);
+ return -1;
+ }
+ if (prevserver_send_command_rsp(tsctrl,
+ rsp_ptr->send_buf, rsp_ptr->send_len)) {
+ LOGE(" RSP Failed for TUNE_PREV_GET_INFO ver cmd %d\n",
+ p->current_cmd);
+ return -1;
+ }
+ rspn_ptr = (eztune_prevcmd_rsp *)rsp_ptr->next;
+ if ((!rspn_ptr) || (!rspn_ptr->send_buf)) {
+ LOGE(" RSP1 ptr is NULL %d\n", p->current_cmd);
+ return -1;
+ }
+ if (prevserver_send_command_rsp(tsctrl,
+ rspn_ptr->send_buf, rspn_ptr->send_len)) {
+ LOGE(" RSP Failed for TUNE_PREV_GET_INFO caps cmd %d\n",
+ p->current_cmd);
+ return -1;
+ }
+ free(rspn_ptr);
+ free(rsp_ptr);
+ break;
+
+ case TUNE_PREV_CH_CNK_SIZE:
+ result = tuneserver_preview_getchunksize(tsctrl, send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if (prevserver_send_command_rsp(tsctrl, *send_buf, *send_len)) {
+ LOGE(" RSP Failed for TUNE_PREV_CH_CNK_SIZE cmd %d\n",
+ p->current_cmd);
+ return -1;
+ }
+ break;
+
+ case TUNE_PREV_GET_PREV_FRAME:
+ result = tuneserver_preview_getframe(tsctrl, send_buf, send_len);
+ if (result < 0) {
+ LOGE(" RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ rsp_ptr = (eztune_prevcmd_rsp *)*send_buf;
+ if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+ LOGE(" RSP ptr is NULL %d\n", p->current_cmd);
+ return -1;
+ }
+ head_ptr = rsp_ptr;
+
+ while (rsp_ptr != NULL) {
+ if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+ LOGE(" RSP ptr is NULL %d\n", p->current_cmd);
+ return -1;
+ }
+ if (prevserver_send_command_rsp(tsctrl,
+ rsp_ptr->send_buf, rsp_ptr->send_len)) {
+ LOGE(" RSP Failed for TUNE_PREV_GET_INFO ver cmd %d\n",
+ p->current_cmd);
+ return -1;
+ }
+ rsp_ptr = (eztune_prevcmd_rsp *)rsp_ptr->next;
+ }
+ release_eztune_prevcmd_rsp(head_ptr);
+ break;
+
+ case TUNE_PREV_GET_JPG_SNAP:
+ case TUNE_PREV_GET_RAW_SNAP:
+ case TUNE_PREV_GET_RAW_PREV:
+ result = tuneserver_preview_unsupported(tsctrl, send_buf, send_len);
+ if (result < 0) {
+ LOGE("RSP processing Failed for cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ if (prevserver_send_command_rsp(tsctrl, *send_buf, *send_len)) {
+ LOGE("RSP Failed for UNSUPPORTED cmd %d\n", p->current_cmd);
+ return -1;
+ }
+ break;
+
+ default:
+ LOGE(" p->current_cmd: default\n");
+ result = -1;
+ break;
+ }
+
+ return result;
+}
+
+/** previewserver_process_client_message
+ * @recv_buffer: received message from the client
+ * @tsctrl: the server control object
+ *
+ * Processes the message from client and prepares for next
+ * message.
+ *
+ * Return: >=0 on success, -1 on failure.
+ **/
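+/* Preview protocol: a 2 byte command is read first; only
+ * TUNE_PREV_CH_CNK_SIZE carries a 4 byte payload (the new chunk
+ * size), every other command is dispatched immediately. */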
+static int32_t prevserver_process_client_message(void *recv_buffer,
+ tuningserver_t *tsctrl)
+{
+ int rc = 0;
+ prserver_protocol_t *p = tsctrl->pr_proto;
+
+ LOGD("command = %d", p->next_recv_code);
+
+ switch (p->next_recv_code) {
+ case TUNE_PREV_RECV_COMMAND:
+ p->current_cmd = *(uint16_t *)recv_buffer;
+ if(p->current_cmd != TUNE_PREV_CH_CNK_SIZE) {
+ rc = prevserver_process_command(tsctrl,
+ &p->send_buf, (uint32_t *)&p->send_len);
+ break;
+ }
+ p->next_recv_code = TUNE_PREV_RECV_NEWCNKSIZE;
+ p->next_recv_len = sizeof(uint32_t);
+ LOGD("TUNE_PREV_COMMAND X\n");
+ break;
+ case TUNE_PREV_RECV_NEWCNKSIZE:
+ p->new_cnk_size = *(uint32_t *)recv_buffer;
+ p->next_recv_code = TUNE_PREV_RECV_COMMAND;
+ p->next_recv_len = 2;
+ rc = prevserver_process_command(tsctrl,
+ &p->send_buf, (uint32_t *)&p->send_len);
+ break;
+ default:
+ LOGE("prev_proc->next_recv_code: default\n");
+ rc = -1;
+ break;
+ }
+
+ return rc;
+}
+
+/** tunning_server_socket_listen
+ * @ip_addr: the ip addr to listen
+ * @port: the port to listen
+ *
+ * Setup a listen socket for eztune.
+ *
+ * Return: >0 on success, <=0 on failure.
+ **/
+int tunning_server_socket_listen(const char* ip_addr, uint16_t port)
+{
+ int sock_fd = -1;
+ mm_qcamera_sock_addr_t server_addr;
+ int result;
+ int option;
+ int socket_flag;
+
+ memset(&server_addr, 0, sizeof(server_addr));
+ server_addr.addr_in.sin_family = AF_INET;
+ server_addr.addr_in.sin_port = (__be16) htons(port);
+ server_addr.addr_in.sin_addr.s_addr = inet_addr(ip_addr);
+
+ if (server_addr.addr_in.sin_addr.s_addr == INADDR_NONE) {
+ LOGE(" invalid address.\n");
+ return -1;
+ }
+
+ /* Create an AF_INET stream socket to receive incoming connection ON */
+ sock_fd = socket(AF_INET, SOCK_STREAM, 0);
+ if (sock_fd < 0) {
+ LOGE(" socket failed\n");
+ return sock_fd;
+ }
+
+ // set the listen socket to non-blocking so a connection that drops between select() and accept() cannot block this thread
+ socket_flag = fcntl(sock_fd, F_GETFL, 0);
+ fcntl(sock_fd, F_SETFL, socket_flag | O_NONBLOCK);
+
+ /* reuse in case it is in timeout */
+ option = 1;
+ result = setsockopt(sock_fd, SOL_SOCKET, SO_REUSEADDR,
+ &option, sizeof(option));
+
+ if (result < 0) {
+ LOGE("eztune setsockopt failed");
+ close(sock_fd);
+ sock_fd = -1;
+ return sock_fd;
+ }
+
+ result = bind(sock_fd, &server_addr.addr, sizeof(server_addr.addr_in));
+ if (result < 0) {
+ LOGE("eztune socket bind failed");
+ close(sock_fd);
+ sock_fd = -1;
+ return sock_fd;
+ }
+
+ result = listen(sock_fd, 1);
+ if (result < 0) {
+ LOGE("eztune socket listen failed");
+ close(sock_fd);
+ sock_fd = -1;
+ return sock_fd;
+ }
+
+ LOGH("sock_fd: %d, listen at port: %d\n", sock_fd, port);
+
+ return sock_fd;
+}
+
+/** eztune_proc
+ *  @data: pointer to the mm_camera_lib_handle passed at thread creation
+ *
+ *  Creates the tuning and preview servers, then waits for
+ *  connections/messages from prospective
+ *  clients
+ *
+ **/
+void *eztune_proc(void *data)
+{
+ int server_socket = -1, client_socket = -1;
+ int prev_server_socket = -1, prev_client_socket = -1;
+
+ mm_qcamera_sock_addr_t addr_client_inet;
+ socklen_t addr_client_len = sizeof(addr_client_inet.addr_in);
+ int result;
+ fd_set tsfds;
+ int num_fds = 0;
+ ssize_t recv_bytes;
+ char buf[TUNESERVER_MAX_RECV];
+
+ mm_camera_lib_handle *lib_handle = (mm_camera_lib_handle *)data;
+
+ LOGE(">>> Starting tune server <<< \n");
+
+ // for eztune chromatix params
+ server_socket = tunning_server_socket_listen(IP_ADDR, TUNING_CHROMATIX_PORT);
+ if (server_socket <= 0) {
+ LOGE("[ERR] fail to setup listen socket for eztune chromatix parms...");
+ return NULL;
+ }
+ prev_server_socket = tunning_server_socket_listen(IP_ADDR, TUNING_PREVIEW_PORT);
+ if (prev_server_socket <= 0) {
+ LOGE("[ERR] fail to setup listen socket for eztune preview...\n");
+ return NULL;
+ }
+ num_fds = TUNESERVER_MAX(server_socket, prev_server_socket);
+ LOGH("num_fds = %d\n", num_fds);
+
+ do {
+ FD_ZERO(&tsfds);
+ FD_SET(server_socket, &tsfds);
+ FD_SET(prev_server_socket, &tsfds);
+ if (client_socket > 0) {
+ FD_SET(client_socket, &tsfds);
+ }
+ if (prev_client_socket > 0) {
+ FD_SET( prev_client_socket, &tsfds);
+ }
+
+ /* no timeout */
+ result = select(num_fds + 1, &tsfds, NULL, NULL, NULL);
+ if (result < 0) {
+ LOGE("select failed: %s\n", strerror(errno));
+ continue;
+ }
+
+ /*
+ ** (1) CHROMATIX SERVER
+ */
+ if (FD_ISSET(server_socket, &tsfds)) {
+ LOGD("Receiving New client connection\n");
+
+ client_socket = accept(server_socket,
+ &addr_client_inet.addr, &addr_client_len);
+ if (client_socket == -1) {
+ LOGE("accept failed %s", strerror(errno));
+ continue;
+ }
+
+ if (client_socket >= FD_SETSIZE) {
+ LOGE("client_socket is out of range. client_socket=%d", client_socket);
+ close(client_socket);
+ client_socket = -1;
+ continue;
+ }
+
+ LOGE("accept a new connect on 55555, sd(%d)\n", client_socket);
+ num_fds = TUNESERVER_MAX(num_fds, client_socket);
+
+ // open camera and get handle - this is needed to
+ // be able to set parameters without starting
+ // preview stream
+ /*if (!tsctrl.camera_running) {
+ result = tuneserver_open_cam(&tsctrl.lib_handle, &tsctrl);
+ if(result) {
+ printf("\n Camera Open Fail !!! \n");
+ close(server_socket);
+ return EXIT_FAILURE;
+ }
+ }*/
+ result = tuneserver_open_cam(lib_handle);
+ if(result) {
+ LOGE("\n Tuning Library open failed!!!\n");
+ close(server_socket);
+ return NULL;
+ }
+ lib_handle->tsctrl.clientsocket_id = client_socket;
+ if (tuneserver_ack_onaccept_initprotocol(&lib_handle->tsctrl) < 0) {
+ LOGE(" Error while acking\n");
+ close(client_socket);
+ continue;
+ }
+ tuneserver_initialize_tuningp(lib_handle, client_socket,
+ lib_handle->tsctrl.proto->send_buf, lib_handle->tsctrl.proto->send_len);
+ }
+
+ if ((client_socket < FD_SETSIZE) && (FD_ISSET(client_socket, &tsfds))) {
+ if (lib_handle->tsctrl.proto == NULL) {
+ LOGE(" Cannot receive msg without connect\n");
+ continue;
+ }
+
+ /*Receive message and process it*/
+ recv_bytes = recv(client_socket, (void *)buf,
+ lib_handle->tsctrl.proto->next_recv_len, 0);
+ LOGD("Receive %lld bytes \n", (long long int) recv_bytes);
+
+ if (recv_bytes == -1) {
+ LOGE(" Receive failed with error %s\n", strerror(errno));
+ //tuneserver_check_status(&tsctrl);
+ continue;
+ } else if (recv_bytes == 0) {
+ LOGE("connection has been terminated\n");
+
+ tuneserver_deinitialize_tuningp(&lib_handle->tsctrl, client_socket,
+ lib_handle->tsctrl.proto->send_buf,
+ lib_handle->tsctrl.proto->send_len);
+ free(lib_handle->tsctrl.proto);
+ lib_handle->tsctrl.proto = NULL;
+
+ close(client_socket);
+ client_socket = -1;
+ //tuneserver_check_status(&tsctrl);
+ } else {
+ LOGD(" Processing socket command\n");
+
+ result = tuneserver_process_client_message(buf, &lib_handle->tsctrl);
+
+ if (result < 0) {
+ LOGE("Protocol violated\n");
+
+ free(lib_handle->tsctrl.proto);
+ lib_handle->tsctrl.proto = NULL;
+
+ close(client_socket);
+ client_socket = -1;
+ //tuneserver_check_status(&tsctrl);
+ continue;
+ }
+ }
+ }
+
+ /*
+ ** (2) PREVIEW SERVER
+ */
+ if (FD_ISSET(prev_server_socket, &tsfds)) {
+ LOGD("Receiving New Preview client connection\n");
+
+ prev_client_socket = accept(prev_server_socket,
+ &addr_client_inet.addr, &addr_client_len);
+ if (prev_client_socket == -1) {
+ LOGE("accept failed %s", strerror(errno));
+ continue;
+ }
+ if (prev_client_socket >= FD_SETSIZE) {
+ LOGE("prev_client_socket is out of range. prev_client_socket=%d", prev_client_socket);
+ close(prev_client_socket);
+ prev_client_socket = -1;
+ continue;
+ }
+
+ lib_handle->tsctrl.pr_clientsocket_id = prev_client_socket;
+
+ LOGD("Accepted a new connection, fd(%d)\n", prev_client_socket);
+ num_fds = TUNESERVER_MAX(num_fds, prev_client_socket);
+
+ // start camera
+ /*if (!tsctrl.camera_running) {
+ result = 0;
+ result = tuneserver_open_cam(&tsctrl.lib_handle, &tsctrl);
+ if(result) {
+ printf("\n Camera Open Fail !!! \n");
+ return EXIT_FAILURE;
+ }
+ }*/
+ cam_dimension_t dim;
+ //dim.width = lib_handle->test_obj.buffer_width;
+ //dim.height = lib_handle->test_obj.buffer_height;
+ dim.width = DEFAULT_PREVIEW_WIDTH;
+ dim.height = DEFAULT_PREVIEW_HEIGHT;
+
+ LOGD("preview dimension info: w(%d), h(%d)\n", dim.width, dim.height);
+ // make sure the camera is running before initializing the connection,
+ // because the frame size is needed to allocate the memory.
+ prevserver_init_protocol(&lib_handle->tsctrl);
+
+ result = tuneserver_initialize_prevtuningp(lib_handle, prev_client_socket,
+ dim, (char **)&lib_handle->tsctrl.proto->send_buf,
+ &lib_handle->tsctrl.proto->send_len);
+ if (result < 0) {
+ LOGE("tuneserver_initialize_prevtuningp error!");
+ close(prev_client_socket);
+ prev_client_socket = -1;
+ }
+ }
+
+ if ((prev_client_socket < FD_SETSIZE) && (FD_ISSET(prev_client_socket, &tsfds))) {
+ recv_bytes = recv(prev_client_socket, (void *)buf,
+ lib_handle->tsctrl.pr_proto->next_recv_len, 0);
+
+ LOGD("prev_client_socket=%d\n", prev_client_socket);
+ LOGD("next_recv_len=%d\n", buf[0]+buf[1]*256);
+
+ if (recv_bytes <= 0) {
+ if (recv_bytes == 0) {
+ LOGE("client close the connection.\n");
+ } else {
+ LOGE("receive error: %s\n", strerror(errno));
+ }
+
+ //tuneserver_check_status(&tsctrl);
+ // if recv error, we should close the connection, free the proto data,
+ // AND wait for a new connection..
+ // close_connection();
+ // stop_camera()
+ // cleanup_proto_data();
+ tuneserver_deinitialize_prevtuningp(&lib_handle->tsctrl,
+ (char **)&lib_handle->tsctrl.proto->send_buf,
+ &lib_handle->tsctrl.proto->send_len);
+ close(prev_client_socket);
+ prev_client_socket = -1;
+ } else {
+ result = prevserver_process_client_message((void *)buf,
+ &lib_handle->tsctrl);
+ if (result < 0) {
+ LOGE("Protocol violated\n");
+
+ //free(tsctrl->preivew_proto);
+ //free(tsctrl);
+ //max_fd = ezt_parms_listen_sd + 1;
+ tuneserver_deinitialize_prevtuningp(&lib_handle->tsctrl,
+ (char **)&lib_handle->tsctrl.proto->send_buf,
+ &lib_handle->tsctrl.proto->send_len);
+ close(prev_client_socket);
+ prev_client_socket = -1;
+ //tuneserver_check_status(&tsctrl);
+ }
+ //sleep(1);
+ }
+ }
+ } while (1);
+
+ if (server_socket >= 0) {
+ close(server_socket);
+ }
+ if (client_socket >= 0) {
+ close(client_socket);
+ }
+ if (prev_server_socket >= 0) {
+ close(prev_server_socket);
+ }
+ if (prev_client_socket >= 0) {
+ close(prev_client_socket);
+ }
+
+ return NULL;
+}
+
+int eztune_server_start (void *lib_handle)
+{
+ return pthread_create(&eztune_thread_id, NULL, eztune_proc, lib_handle);
+}
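+
+/* Usage sketch (hypothetical caller, not part of this file): the test app
+ * would spawn the tuning thread once a camera library handle is available:
+ *
+ *   mm_camera_lib_handle lib_handle;            // assumed opened elsewhere
+ *   if (eztune_server_start(&lib_handle) != 0)  // runs eztune_proc()
+ *     LOGE("failed to start eztune thread");
+ */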
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
new file mode 100644
index 0000000..c6eaab7
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
@@ -0,0 +1,695 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 1
+#define MM_QCAMERA_APP_UTEST_OUTER_LOOP 1
+#define MM_QCAMERA_APP_UTEST_INNER_LOOP 1
+#define MM_QCAM_APP_TEST_NUM 128
+
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+
+int mm_app_tc_open_close(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying open/close cameras...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_start_stop_preview(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying start/stop preview...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_stop_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc |= mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_start_stop_zsl(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying start/stop preview...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < 1; j++) {
+ rc = mm_app_start_preview_zsl(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_preview_zsl() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_stop_preview_zsl(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_preview_zsl() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_start_stop_video_preview(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying start/stop video preview...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_stop_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_start_stop_video_record(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying start/stop recording...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ rc = mm_app_start_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ sleep(1);
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_record(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_record() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ sleep(1);
+
+ rc = mm_app_stop_record(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_record() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc != MM_CAMERA_OK) {
+ LOGE("start/stop record cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_stop_record_preview(&test_obj);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ rc = mm_app_stop_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_start_stop_live_snapshot(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying start/stop live snapshot...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ rc = mm_app_start_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ sleep(1);
+
+ rc = mm_app_start_record(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_record() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_stop_record_preview(&test_obj);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ sleep(1);
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_live_snapshot(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_start_live_snapshot() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ /* wait for jpeg is done */
+ mm_camera_app_wait();
+
+ rc = mm_app_stop_live_snapshot(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_live_snapshot() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc != MM_CAMERA_OK) {
+ LOGE("start/stop live snapshot cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_stop_record(&test_obj);
+ mm_app_stop_record_preview(&test_obj);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ rc = mm_app_stop_record(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_record() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_stop_record_preview(&test_obj);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ sleep(1);
+
+ rc = mm_app_stop_record_preview(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ mm_app_close(&test_obj);
+ break;
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_capture_raw(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+ uint8_t num_snapshot = 1;
+ uint8_t num_rcvd_snapshot = 0;
+
+ printf("\n Verifying raw capture...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_capture_raw(&test_obj, num_snapshot);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ while (num_rcvd_snapshot < num_snapshot) {
+ mm_camera_app_wait();
+ num_rcvd_snapshot++;
+ }
+ rc = mm_app_stop_capture_raw(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc |= mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_capture_regular(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+ uint8_t num_snapshot = 1;
+ uint8_t num_rcvd_snapshot = 0;
+
+ printf("\n Verifying capture...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_capture(&test_obj, num_snapshot);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ while (num_rcvd_snapshot < num_snapshot) {
+ mm_camera_app_wait();
+ num_rcvd_snapshot++;
+ }
+ rc = mm_app_stop_capture(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_capture_burst(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+ uint8_t num_snapshot = 3;
+ uint8_t num_rcvd_snapshot = 0;
+
+ printf("\n Verifying capture...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_capture(&test_obj, num_snapshot);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ while (num_rcvd_snapshot < num_snapshot) {
+ mm_camera_app_wait();
+ num_rcvd_snapshot++;
+ }
+ rc = mm_app_stop_capture(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc = mm_app_close(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_rdi_burst(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying rdi burst (3) capture...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_rdi(&test_obj, 3);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_stop_rdi(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc2 = mm_app_close(&test_obj);
+ if (rc2 != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc2);
+ if (rc == MM_CAMERA_OK) {
+ rc = rc2;
+ }
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_tc_rdi_cont(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+ int i, j;
+ mm_camera_test_obj_t test_obj;
+
+ printf("\n Verifying rdi continuous capture...\n");
+ for (i = 0; i < cam_app->num_cameras; i++) {
+ memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+ rc = mm_app_open(cam_app, i, &test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+ rc = mm_app_start_rdi(&test_obj, 0);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_start_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ sleep(1);
+ rc = mm_app_stop_rdi(&test_obj);
+ if (rc != MM_CAMERA_OK) {
+ LOGE(" mm_app_stop_preview() cam_idx=%d, err=%d\n",
+ i, rc);
+ break;
+ }
+ }
+
+ rc2 = mm_app_close(&test_obj);
+ if (rc2 != MM_CAMERA_OK) {
+ LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+ i, rc2);
+ if (rc == MM_CAMERA_OK) {
+ rc = rc2;
+ }
+ break;
+ }
+ }
+ if (rc == MM_CAMERA_OK) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ LOGD("END, rc = %d\n", rc);
+ return rc;
+}
+
+int mm_app_gen_test_cases()
+{
+ int tc = 0;
+ memset(mm_app_tc, 0, sizeof(mm_app_tc));
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_open_close;
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_preview;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_zsl;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_preview;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_record;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_live_snapshot;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_regular;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_burst;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_cont;
+ //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_burst;
+
+ return tc;
+}
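+
+/* Illustrative note: further cases are enabled by appending to the table
+ * above before the count is returned, e.g. the raw capture test defined
+ * earlier in this file:
+ *
+ *   if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_raw;
+ */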
+
+int mm_app_unit_test_entry(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, j, tc = 0;
+
+ tc = mm_app_gen_test_cases();
+ LOGD("Running %d test cases\n",tc);
+ for (i = 0; i < tc; i++) {
+ for (j = 0; j < MM_QCAMERA_APP_UTEST_OUTER_LOOP; j++) {
+ mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+ if (mm_app_tc[i].r != MM_CAMERA_OK) {
+ printf(" test case %d (iteration %d) error = %d, abort unit testing engine!!!!\n",
+ i, j, mm_app_tc[i].r);
+ rc = mm_app_tc[i].r;
+ goto end;
+ }
+ }
+ }
+end:
+ printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+ return rc;
+}
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
new file mode 100644
index 0000000..ba0a57f
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
@@ -0,0 +1,258 @@
+/*
+Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static void mm_app_video_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ char file_name[64];
+ mm_camera_buf_def_t *frame = bufs->bufs[0];
+ mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+ LOGD("BEGIN - length=%zu, frame idx = %d\n",
+ frame->frame_len, frame->frame_idx);
+ snprintf(file_name, sizeof(file_name), "V_C%d", pme->cam->camera_handle);
+ mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+ bufs->ch_id,
+ frame)) {
+ LOGE("Failed in Preview Qbuf\n");
+ }
+ mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+ ION_IOC_INV_CACHES);
+
+ LOGD("END\n");
+}
+
+mm_camera_stream_t * mm_app_add_video_stream(mm_camera_test_obj_t *test_obj,
+ mm_camera_channel_t *channel,
+ mm_camera_buf_notify_t stream_cb,
+ void *userdata,
+ uint8_t num_bufs)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_stream_t *stream = NULL;
+ cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+ stream = mm_app_add_stream(test_obj, channel);
+ if (NULL == stream) {
+ LOGE("add stream failed\n");
+ return NULL;
+ }
+
+ stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+ stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+ stream->s_config.mem_vtbl.clean_invalidate_buf =
+ mm_app_stream_clean_invalidate_buf;
+ stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+ stream->s_config.mem_vtbl.user_data = (void *)stream;
+ stream->s_config.stream_cb = stream_cb;
+ stream->s_config.stream_cb_sync = NULL;
+ stream->s_config.userdata = userdata;
+ stream->num_of_bufs = num_bufs;
+
+ stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+ memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+ stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_VIDEO;
+ stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+ stream->s_config.stream_info->fmt = DEFAULT_VIDEO_FORMAT;
+ stream->s_config.stream_info->dim.width = DEFAULT_VIDEO_WIDTH;
+ stream->s_config.stream_info->dim.height = DEFAULT_VIDEO_HEIGHT;
+ stream->s_config.padding_info = cam_cap->padding_info;
+
+ rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("config preview stream err=%d\n", rc);
+ return NULL;
+ }
+
+ return stream;
+}
+
+mm_camera_channel_t * mm_app_add_video_channel(mm_camera_test_obj_t *test_obj)
+{
+ mm_camera_channel_t *channel = NULL;
+ mm_camera_stream_t *stream = NULL;
+
+ channel = mm_app_add_channel(test_obj,
+ MM_CHANNEL_TYPE_VIDEO,
+ NULL,
+ NULL,
+ NULL);
+ if (NULL == channel) {
+ LOGE("add channel failed");
+ return NULL;
+ }
+
+ stream = mm_app_add_video_stream(test_obj,
+ channel,
+ mm_app_video_notify_cb,
+ (void *)test_obj,
+ 1);
+ if (NULL == stream) {
+ LOGE("add video stream failed\n");
+ mm_app_del_channel(test_obj, channel);
+ return NULL;
+ }
+
+ return channel;
+}
+
+int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *p_ch = NULL;
+ mm_camera_channel_t *v_ch = NULL;
+ mm_camera_channel_t *s_ch = NULL;
+
+ p_ch = mm_app_add_preview_channel(test_obj);
+ if (NULL == p_ch) {
+ LOGE("add preview channel failed");
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ v_ch = mm_app_add_video_channel(test_obj);
+ if (NULL == v_ch) {
+ LOGE("add video channel failed");
+ mm_app_del_channel(test_obj, p_ch);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ s_ch = mm_app_add_snapshot_channel(test_obj);
+ if (NULL == s_ch) {
+ LOGE("add snapshot channel failed");
+ mm_app_del_channel(test_obj, p_ch);
+ mm_app_del_channel(test_obj, v_ch);
+ return -MM_CAMERA_E_GENERAL;
+ }
+
+ rc = mm_app_start_channel(test_obj, p_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start preview failed rc=%d\n", rc);
+ mm_app_del_channel(test_obj, p_ch);
+ mm_app_del_channel(test_obj, v_ch);
+ mm_app_del_channel(test_obj, s_ch);
+ return rc;
+ }
+
+ return rc;
+}
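+
+/* Typical call order (as exercised by mm_app_tc_start_stop_video_record in
+ * mm_qcamera_unit_test.c): recording is bracketed by the preview setup and
+ * teardown helpers in this file:
+ *
+ *   mm_app_start_record_preview(&test_obj);
+ *   mm_app_start_record(&test_obj);
+ *   // ... optionally mm_app_start_live_snapshot()/mm_app_stop_live_snapshot() ...
+ *   mm_app_stop_record(&test_obj);
+ *   mm_app_stop_record_preview(&test_obj);
+ */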
+
+int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *p_ch = NULL;
+ mm_camera_channel_t *v_ch = NULL;
+ mm_camera_channel_t *s_ch = NULL;
+
+ p_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+ v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+ s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+ rc = mm_app_stop_and_del_channel(test_obj, p_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ rc = mm_app_stop_and_del_channel(test_obj, v_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ rc = mm_app_stop_and_del_channel(test_obj, s_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("Stop Preview failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_start_record(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *v_ch = NULL;
+
+ v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+ rc = mm_app_start_channel(test_obj, v_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start recording failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_stop_record(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *v_ch = NULL;
+
+ v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+ rc = mm_app_stop_channel(test_obj, v_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("stop recording failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *s_ch = NULL;
+
+ s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+ rc = mm_app_start_channel(test_obj, s_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("start recording failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
+
+int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_channel_t *s_ch = NULL;
+
+ s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+ rc = mm_app_stop_channel(test_obj, s_ch);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("stop recording failed rc=%d\n", rc);
+ }
+
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/Android.mk b/camera/QCamera2/stack/mm-jpeg-interface/Android.mk
new file mode 100644
index 0000000..175796b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -0,0 +1,82 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_CFLAGS+= -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LIB2D_ROTATION=false
+
+LOCAL_C_INCLUDES += \
+ frameworks/native/include/media/openmax \
+ $(LOCAL_PATH)/inc \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../mm-camera-interface/inc \
+ $(LOCAL_PATH)/../../.. \
+ $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+ $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+ifeq ($(strip $(LIB2D_ROTATION)),true)
+ LOCAL_C_INCLUDES += $(LOCAL_PATH)/../mm-lib2d-interface/inc
+ LOCAL_CFLAGS += -DLIB2D_ROTATION_ENABLE
+endif
+
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+ LOCAL_CFLAGS += -DUSE_ION
+endif
+
+ifneq (,$(filter msm8610,$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS+= -DLOAD_ADSP_RPC_LIB
+endif
+
+DUAL_JPEG_TARGET_LIST := msm8974
+DUAL_JPEG_TARGET_LIST += msm8994
+
+ifneq (,$(filter $(DUAL_JPEG_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS+= -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=2
+else
+ LOCAL_CFLAGS+= -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=1
+endif
+
+JPEG_PIPELINE_TARGET_LIST := msm8994
+JPEG_PIPELINE_TARGET_LIST += msm8992
+JPEG_PIPELINE_TARGET_LIST += msm8996
+JPEG_PIPELINE_TARGET_LIST += msmcobalt
+
+ifneq (,$(filter $(JPEG_PIPELINE_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
+ LOCAL_CFLAGS+= -DMM_JPEG_USE_PIPELINE
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES := \
+ src/mm_jpeg_queue.c \
+ src/mm_jpeg_exif.c \
+ src/mm_jpeg.c \
+ src/mm_jpeg_interface.c \
+ src/mm_jpeg_ionbuf.c \
+ src/mm_jpegdec_interface.c \
+ src/mm_jpegdec.c \
+ src/mm_jpeg_mpo_composer.c
+
+LOCAL_MODULE := libmmjpeg_interface
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libqomx_core libmmcamera_interface
+ifeq ($(strip $(LIB2D_ROTATION)),true)
+ LOCAL_SHARED_LIBRARIES += libmmlib2d_interface
+endif
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
new file mode 100644
index 0000000..885a4b8
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -0,0 +1,539 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_H_
+#define MM_JPEG_H_
+
+// OpenMAX dependencies
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "QOMX_JpegExtensions.h"
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "cam_list.h"
+#include "cam_semaphore.h"
+
+#define MM_JPEG_MAX_THREADS 30
+#define MM_JPEG_CIRQ_SIZE 30
+#define MM_JPEG_MAX_SESSION 10
+#define MAX_EXIF_TABLE_ENTRIES 50
+#define MAX_JPEG_SIZE 20000000
+#define MAX_OMX_HANDLES (5)
+// Thumbnail src and dest aspect ratio difference tolerance
+#define ASPECT_TOLERANCE 0.001
+
+
+/** mm_jpeg_abort_state_t:
+ * @MM_JPEG_ABORT_NONE: Abort is not issued
+ * @MM_JPEG_ABORT_INIT: Abort is issued from the client
+ * @MM_JPEG_ABORT_DONE: Abort is completed
+ *
+ * State representing the abort state
+ **/
+typedef enum {
+ MM_JPEG_ABORT_NONE,
+ MM_JPEG_ABORT_INIT,
+ MM_JPEG_ABORT_DONE,
+} mm_jpeg_abort_state_t;
+
+
+/* define max num of supported concurrent jpeg jobs by OMX engine.
+ * Currently, only one at a time */
+#define NUM_MAX_JPEG_CNCURRENT_JOBS 2
+
+#define JOB_ID_MAGICVAL 0x1
+#define JOB_HIST_MAX 10000
+
+/** DUMP_TO_FILE:
+ * @filename: file name
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+ size_t rc = 0; \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr, 1, len, fp); \
+ LOGE("written size %zu", len); \
+ fclose(fp); \
+ } else { \
+ LOGE("open %s failed", filename); \
+ } \
+})
+
+/** DUMP_TO_FILE2:
+ * @filename: file name
+ *  @p_addr1: address of the first buffer
+ *  @len1: first buffer length
+ *  @p_addr2: address of the second buffer
+ *  @len2: second buffer length
+ *
+ *  dump the image to the file when the buffer memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+ size_t rc = 0; \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr1, 1, len1, fp); \
+ rc = fwrite(p_addr2, 1, len2, fp); \
+ LOGE("written %zu %zu", len1, len2); \
+ fclose(fp); \
+ } else { \
+ LOGE("open %s failed", filename); \
+ } \
+})
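+
+/* Usage sketch (paths and buffer names are hypothetical): dump a contiguous
+ * buffer, or a split pair such as separate luma/chroma planes:
+ *
+ *   DUMP_TO_FILE("/data/misc/camera/main.yuv", p_main, main_len);
+ *   DUMP_TO_FILE2("/data/misc/camera/split.yuv", p_y, y_len, p_cbcr, cbcr_len);
+ */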
+
+/** MM_JPEG_CHK_ABORT:
+ * @p: client pointer
+ * @ret: return value
+ * @label: label to jump to
+ *
+ * check the abort failure
+ **/
+#define MM_JPEG_CHK_ABORT(p, ret, label) ({ \
+ if (MM_JPEG_ABORT_INIT == p->abort_state) { \
+ LOGE("jpeg abort"); \
+ ret = OMX_ErrorNone; \
+ goto label; \
+ } \
+})
+
+#define GET_CLIENT_IDX(x) ((x) & 0xff)
+#define GET_SESSION_IDX(x) (((x) >> 8) & 0xff)
+#define GET_JOB_IDX(x) (((x) >> 16) & 0xff)
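+
+/* Layout sketch (inferred from the masks above): a job id packs three 8-bit
+ * indices, so a hypothetical encoder side would look like
+ *
+ *   uint32_t job_id = ((uint32_t)job_idx << 16) |
+ *                     ((uint32_t)session_idx << 8) |
+ *                     (uint32_t)client_idx;
+ *   // GET_CLIENT_IDX(job_id)  -> client_idx
+ *   // GET_SESSION_IDX(job_id) -> session_idx
+ *   // GET_JOB_IDX(job_id)     -> job_idx
+ */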
+
+typedef struct {
+ union {
+ int i_data[MM_JPEG_CIRQ_SIZE];
+ void *p_data[MM_JPEG_CIRQ_SIZE];
+ };
+ int front;
+ int rear;
+ int count;
+ pthread_mutex_t lock;
+} mm_jpeg_cirq_t;
+
+/** cirq_reset:
+ *
+ * Arguments:
+ * @q: circular queue
+ *
+ * Return:
+ * none
+ *
+ * Description:
+ * Resets the circular queue
+ *
+ **/
+static inline void cirq_reset(mm_jpeg_cirq_t *q)
+{
+ q->front = 0;
+ q->rear = 0;
+ q->count = 0;
+ pthread_mutex_init(&q->lock, NULL);
+}
+
+/** cirq_empty:
+ *
+ * Arguments:
+ * @q: circular queue
+ *
+ * Return:
+ *       true if the queue is empty, false otherwise
+ *
+ *  Description:
+ *       check if the circular queue is empty
+ *
+ **/
+#define cirq_empty(q) (q->count == 0)
+
+/** cirq_full:
+ *
+ * Arguments:
+ * @q: circular queue
+ *
+ * Return:
+ *       true if the queue is full, false otherwise
+ *
+ *  Description:
+ *       check if the circular queue is full
+ *
+ **/
+#define cirq_full(q) (q->count == MM_JPEG_CIRQ_SIZE)
+
+/** cirq_enqueue:
+ *
+ * Arguments:
+ * @q: circular queue
+ * @data: data to be inserted
+ *
+ * Return:
+ *       0 on success, -1 if the queue is full
+ *
+ * Description:
+ * enqueue an element into circular queue
+ *
+ **/
+#define cirq_enqueue(q, type, data) ({ \
+ int rc = 0; \
+ pthread_mutex_lock(&q->lock); \
+ if (cirq_full(q)) { \
+ rc = -1; \
+ } else { \
+ q->type[q->rear] = data; \
+ q->rear = (q->rear + 1) % MM_JPEG_CIRQ_SIZE; \
+ q->count++; \
+ } \
+ pthread_mutex_unlock(&q->lock); \
+ rc; \
+})
+
+/** cirq_dequeue:
+ *
+ * Arguments:
+ * @q: circular queue
+ * @data: data to be popped
+ *
+ * Return:
+ *       0 on success, -1 if the queue is empty
+ *
+ * Description:
+ * dequeue an element from the circular queue
+ *
+ **/
+#define cirq_dequeue(q, type, data) ({ \
+ int rc = 0; \
+ pthread_mutex_lock(&q->lock); \
+ if (cirq_empty(q)) { \
+ rc = -1; \
+ } else { \
+ data = q->type[q->front]; \
+ q->front = (q->front + 1) % MM_JPEG_CIRQ_SIZE; \
+ q->count--; \
+ } \
+ pthread_mutex_unlock(&q->lock); \
+ rc; \
+})
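+
+/* Usage sketch (illustrative only): the queue stores either ints or pointers
+ * via the anonymous union; the 'type' argument selects the member:
+ *
+ *   mm_jpeg_cirq_t q;
+ *   void *evt = NULL;
+ *   cirq_reset(&q);
+ *   if (cirq_enqueue((&q), p_data, some_ptr) < 0)   // -1 when full
+ *     LOGE("queue overflow");
+ *   if (cirq_dequeue((&q), p_data, evt) == 0)       // evt now holds the element
+ *     ; // consume evt (hypothetical)
+ */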
+
+
+typedef union {
+ uint32_t u32;
+ void* p;
+} mm_jpeg_q_data_t;
+
+typedef struct {
+ struct cam_list list;
+ mm_jpeg_q_data_t data;
+} mm_jpeg_q_node_t;
+
+typedef struct {
+ mm_jpeg_q_node_t head; /* dummy head */
+ uint32_t size;
+ pthread_mutex_t lock;
+} mm_jpeg_queue_t;
+
+typedef enum {
+ MM_JPEG_CMD_TYPE_JOB, /* job cmd */
+ MM_JPEG_CMD_TYPE_EXIT, /* EXIT cmd for exiting jobMgr thread */
+ MM_JPEG_CMD_TYPE_DECODE_JOB,
+ MM_JPEG_CMD_TYPE_MAX
+} mm_jpeg_cmd_type_t;
+
+typedef struct mm_jpeg_job_session {
+ uint32_t client_hdl; /* client handler */
+ uint32_t jobId; /* job ID */
+ uint32_t sessionId; /* session ID */
+ mm_jpeg_encode_params_t params; /* encode params */
+ mm_jpeg_decode_params_t dec_params; /* decode params */
+ mm_jpeg_encode_job_t encode_job; /* job description */
+ mm_jpeg_decode_job_t decode_job;
+ pthread_t encode_pid; /* encode thread handler*/
+
+ void *jpeg_obj; /* ptr to mm_jpeg_obj */
+ jpeg_job_status_t job_status; /* job status */
+
+ int state_change_pending; /* flag to indicate if state change is pending */
+ OMX_ERRORTYPE error_flag; /* variable to indicate error during encoding */
+ mm_jpeg_abort_state_t abort_state; /* variable to indicate abort during encoding */
+
+ /* OMX related */
+ OMX_HANDLETYPE omx_handle; /* handle to omx engine */
+ OMX_CALLBACKTYPE omx_callbacks; /* callbacks to omx engine */
+
+ /* buffer headers */
+ OMX_BUFFERHEADERTYPE *p_in_omx_buf[MM_JPEG_MAX_BUF];
+ OMX_BUFFERHEADERTYPE *p_in_omx_thumb_buf[MM_JPEG_MAX_BUF];
+ OMX_BUFFERHEADERTYPE *p_out_omx_buf[MM_JPEG_MAX_BUF];
+ OMX_BUFFERHEADERTYPE *p_in_rot_omx_buf[MM_JPEG_MAX_BUF];
+ OMX_BUFFERHEADERTYPE *p_in_rot_omx_thumb_buf[MM_JPEG_MAX_BUF];
+
+ OMX_PARAM_PORTDEFINITIONTYPE inputPort;
+ OMX_PARAM_PORTDEFINITIONTYPE outputPort;
+ OMX_PARAM_PORTDEFINITIONTYPE inputTmbPort;
+
+ /* event locks */
+ pthread_mutex_t lock;
+ pthread_cond_t cond;
+
+ QEXIF_INFO_DATA exif_info_local[MAX_EXIF_TABLE_ENTRIES]; //all exif tags for JPEG encoder
+ int exif_count_local;
+
+ mm_jpeg_cirq_t cb_q;
+ int32_t ebd_count;
+ int32_t fbd_count;
+
+ /* this flag represents whether the job is active */
+ OMX_BOOL active;
+
+ /* this flag indicates if the configuration is complete */
+ OMX_BOOL config;
+
+ /* job history count to generate unique id */
+ unsigned int job_hist;
+
+ OMX_BOOL encoding;
+
+ buffer_t work_buffer;
+ /* src rotate ion bufs */
+ buffer_t src_rot_ion_buffer[MM_JPEG_MAX_BUF];
+
+ OMX_EVENTTYPE omxEvent;
+ int event_pending;
+
+ uint8_t *meta_enc_key;
+ size_t meta_enc_keylen;
+
+ struct mm_jpeg_job_session *next_session;
+
+ uint32_t curr_out_buf_idx;
+
+ uint32_t num_omx_sessions;
+ OMX_BOOL auto_out_buf;
+
+ mm_jpeg_queue_t *session_handle_q;
+ mm_jpeg_queue_t *out_buf_q;
+
+ int thumb_from_main;
+ uint32_t job_index;
+
+ /* lib2d rotation flag*/
+ uint32_t lib2d_rotation_flag;
+
+ /* num of buf for input src rotation */
+ uint32_t num_src_rot_bufs;
+
+ /* src rotate img bufs */
+ mm_jpeg_buf_t src_rot_main_buf[MM_JPEG_MAX_BUF];
+
+ /* lib2d handle*/
+ void *lib2d_handle;
+} mm_jpeg_job_session_t;
+
+typedef struct {
+ mm_jpeg_encode_job_t encode_job;
+ uint32_t job_id;
+ uint32_t client_handle;
+} mm_jpeg_encode_job_info_t;
+
+typedef struct {
+ mm_jpeg_decode_job_t decode_job;
+ uint32_t job_id;
+ uint32_t client_handle;
+} mm_jpeg_decode_job_info_t;
+
+typedef struct {
+ mm_jpeg_cmd_type_t type;
+ union {
+ mm_jpeg_encode_job_info_t enc_info;
+ mm_jpeg_decode_job_info_t dec_info;
+ };
+} mm_jpeg_job_q_node_t;
+
+typedef struct {
+ uint8_t is_used; /* flag: if is a valid client */
+ uint32_t client_handle; /* client handle */
+ mm_jpeg_job_session_t session[MM_JPEG_MAX_SESSION];
+ pthread_mutex_t lock; /* job lock */
+} mm_jpeg_client_t;
+
+typedef struct {
+ pthread_t pid; /* job cmd thread ID */
+ cam_semaphore_t job_sem; /* semaphore for job cmd thread */
+ mm_jpeg_queue_t job_queue; /* queue for job to do */
+} mm_jpeg_job_cmd_thread_t;
+
+#define MAX_JPEG_CLIENT_NUM 8
+typedef struct mm_jpeg_obj_t {
+ /* ClientMgr */
+ int num_clients; /* num of clients */
+ mm_jpeg_client_t clnt_mgr[MAX_JPEG_CLIENT_NUM]; /* client manager */
+
+ /* JobMgr */
+ pthread_mutex_t job_lock; /* job lock */
+ mm_jpeg_job_cmd_thread_t job_mgr; /* job mgr thread including todo_q*/
+ mm_jpeg_queue_t ongoing_job_q; /* queue for ongoing jobs */
+ buffer_t ionBuffer[MM_JPEG_CONCURRENT_SESSIONS_COUNT];
+
+
+ /* Max pic dimension for work buf calc*/
+ uint32_t max_pic_w;
+ uint32_t max_pic_h;
+#ifdef LOAD_ADSP_RPC_LIB
+ void *adsprpc_lib_handle;
+#endif
+
+ uint32_t work_buf_cnt;
+
+ uint32_t num_sessions;
+ uint32_t reuse_reproc_buffer;
+
+ cam_jpeg_metadata_t *jpeg_metadata;
+
+ /* Pointer to the session in progress*/
+ mm_jpeg_job_session_t *p_session_inprogress;
+
+ // dummy OMX handle
+ OMX_HANDLETYPE dummy_handle;
+} mm_jpeg_obj;
+
+/** mm_jpeg_transition_func_t:
+ *
+ * Intermediate function for transition change
+ **/
+typedef OMX_ERRORTYPE (*mm_jpeg_transition_func_t)(void *);
+
+extern int32_t mm_jpeg_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj);
+extern uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_t* job,
+ uint32_t* jobId);
+extern int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+ uint32_t jobId);
+extern int32_t mm_jpeg_close(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl);
+extern int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ mm_jpeg_encode_params_t *p_params,
+ uint32_t* p_session_id);
+extern int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj,
+ uint32_t session_id);
+
+extern int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_t* job,
+ uint32_t* jobId);
+
+extern int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ mm_jpeg_decode_params_t *p_params,
+ uint32_t* p_session_id);
+
+extern int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj,
+ uint32_t session_id);
+
+extern int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+ uint32_t jobId);
+
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_q_node_t* job_node);
+
+/* utility functions declared in mm-camera-interface2.c
+ * and needed by mm-camera and below */
+uint32_t mm_jpeg_util_generate_handler(uint8_t index);
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler);
+
+/* basic queue functions */
+extern int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue,
+ mm_jpeg_q_data_t data);
+extern int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue,
+ mm_jpeg_q_data_t data);
+extern mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue);
+extern uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue);
+extern mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue);
+extern int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+ exif_tag_type_t type, uint32_t count, void *data);
+extern int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data);
+extern int process_meta_data(metadata_buffer_t *p_meta,
+ QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam3a_params,
+ cam_hal_version_t hal_version);
+
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+ OMX_STATETYPE new_state,
+ mm_jpeg_transition_func_t p_exec);
+
+int map_jpeg_format(mm_jpeg_color_format color_fmt);
+
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session);
+/**
+ *
+ * special queue functions for job queue
+ **/
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+ mm_jpeg_queue_t* queue, uint32_t client_hdl);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+ mm_jpeg_queue_t* queue, uint32_t job_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+ mm_jpeg_queue_t* queue, uint32_t session_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+ mm_jpeg_queue_t* queue, uint32_t job_id);
+
+
+/** mm_jpeg_queue_func_t:
+ *
+ * Intermediate function for queue operation
+ **/
+typedef void (*mm_jpeg_queue_func_t)(void *);
+
+/** mm_jpeg_exif_flash_mode:
+ *
+ * Exif flash mode values
+ **/
+typedef enum {
+ MM_JPEG_EXIF_FLASH_MODE_ON = 0x1,
+ MM_JPEG_EXIF_FLASH_MODE_OFF = 0x2,
+ MM_JPEG_EXIF_FLASH_MODE_AUTO = 0x3,
+ MM_JPEG_EXIF_FLASH_MODE_MAX
+} mm_jpeg_exif_flash_mode;
+
+#endif /* MM_JPEG_H_ */
+
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
new file mode 100644
index 0000000..2269537
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
@@ -0,0 +1,55 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_DBG_H__
+#define __MM_JPEG_DBG_H__
+
+#ifdef QCAMERA_REDEFINE_LOG
+#define CAM_MODULE CAM_JPEG_MODULE
+#include "mm_camera_dbg.h"
+#endif
+
+extern volatile uint32_t gKpiDebugLevel;
+
+#ifndef KPI_DEBUG
+#define KPI_DEBUG
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#include <cutils/trace.h>
+
+#define KPI_APT 1
+#define KPI_DBG 2
+
+#define KPI_ATRACE_INT(name,val) ({\
+if (gKpiDebugLevel >= KPI_APT) { \
+ atrace_int(ATRACE_TAG, name, val); \
+}\
+})
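+
+/* Usage sketch (counter name is hypothetical): emit an integer trace event
+ * only when KPI logging is enabled:
+ *
+ *   KPI_ATRACE_INT("camera.jpeg.pending_jobs", pending_jobs);
+ */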
+
+#endif
+#endif /* __MM_JPEG_DBG_H__ */
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
new file mode 100644
index 0000000..d2ca63d
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
@@ -0,0 +1,127 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INLINES_H_
+#define MM_JPEG_INLINES_H_
+
+// JPEG dependencies
+#include "mm_jpeg.h"
+
+/** mm_jpeg_get_session:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       pointer to the job session, NULL if the job id is invalid
+ *
+ *  Description:
+ *       Get the job session by job id
+ *
+ **/
+static inline mm_jpeg_job_session_t *mm_jpeg_get_session(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+ mm_jpeg_job_session_t *p_session = NULL;
+ int client_idx = GET_CLIENT_IDX(job_id);
+ int session_idx= GET_SESSION_IDX(job_id);
+
+ LOGD("client_idx %d session_idx %d",
+ client_idx, session_idx);
+ if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+ (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+ LOGE("invalid job id %x",
+ job_id);
+ return NULL;
+ }
+ pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+ p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+ pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+ return p_session;
+}
+
+/** mm_jpeg_get_new_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_idx: client index
+ *    @pp_session: output pointer, set to the reserved session
+ *
+ *  Return:
+ *       session index, or -1 if no free session slot is available
+ *
+ *  Description:
+ *       Reserve a free session slot for the given client and mark it active
+ *
+ **/
+static inline int mm_jpeg_get_new_session_idx(mm_jpeg_obj *my_obj, int client_idx,
+ mm_jpeg_job_session_t **pp_session)
+{
+ int i = 0;
+ int index = -1;
+ for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+ pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+ if (!my_obj->clnt_mgr[client_idx].session[i].active) {
+ *pp_session = &my_obj->clnt_mgr[client_idx].session[i];
+ my_obj->clnt_mgr[client_idx].session[i].active = OMX_TRUE;
+ index = i;
+ pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+ break;
+ }
+ pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+ }
+ return index;
+}
+
+/** mm_jpeg_remove_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Mark the session associated with the job id as inactive
+ *
+ **/
+static inline void mm_jpeg_remove_session_idx(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+ int client_idx = GET_CLIENT_IDX(job_id);
+ int session_idx= GET_SESSION_IDX(job_id);
+ LOGD("client_idx %d session_idx %d",
+ client_idx, session_idx);
+ pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+ my_obj->clnt_mgr[client_idx].session[session_idx].active = OMX_FALSE;
+ pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+}
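+
+/*
+ * Illustrative lifecycle sketch for the helpers above (the way job_id is
+ * composed from client/session indices lives in mm_jpeg.h and is assumed
+ * here; error handling elided):
+ *
+ *   mm_jpeg_job_session_t *p_session = NULL;
+ *   int session_idx = mm_jpeg_get_new_session_idx(my_obj, client_idx,
+ *       &p_session);                              // reserve a free slot
+ *   if (session_idx < 0) {
+ *     // no free session slot for this client
+ *   }
+ *   ...encode using p_session; job_id derived from client/session idx...
+ *   p_session = mm_jpeg_get_session(my_obj, job_id); // look up by job id
+ *   mm_jpeg_remove_session_idx(my_obj, job_id);      // mark slot inactive
+ */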
+
+
+
+#endif /* MM_JPEG_INLINES_H_ */
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
new file mode 100644
index 0000000..96b70d9
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
@@ -0,0 +1,105 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_IONBUF_H__
+#define __MM_JPEG_IONBUF_H__
+
+// System dependencies
+#include <linux/msm_ion.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+
+typedef struct {
+ struct ion_fd_data ion_info_fd;
+ struct ion_allocation_data alloc;
+ int p_pmem_fd;
+ size_t size;
+ int ion_fd;
+ uint8_t *addr;
+} buffer_t;
+
+/** buffer_allocate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * buffer address
+ *
+ * Description:
+ * allocates ION buffer
+ *
+ **/
+void* buffer_allocate(buffer_t *p_buffer, int cached);
+
+/** buffer_deallocate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * error val
+ *
+ * Description:
+ * deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer);
+
+/** buffer_invalidate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * error val
+ *
+ * Description:
+ * Invalidates the cached buffer
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer);
+
+/** buffer_clean:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * error val
+ *
+ * Description:
+ * clean the cached buffer
+ *
+ **/
+int buffer_clean(buffer_t *p_buffer);
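+
+/*
+ * Illustrative lifecycle sketch for the ION buffer helpers declared above
+ * (error handling elided):
+ *
+ *   buffer_t work_buf;
+ *   memset(&work_buf, 0, sizeof(work_buf));
+ *   work_buf.size = len;
+ *   if (NULL == buffer_allocate(&work_buf, 1)) {  // cached allocation
+ *     // allocation failed
+ *   }
+ *   ...CPU writes to work_buf.addr...
+ *   buffer_clean(&work_buf);        // flush CPU cache before HW reads
+ *   ...hardware uses the buffer...
+ *   buffer_invalidate(&work_buf);   // drop stale cache before CPU reads
+ *   buffer_deallocate(&work_buf);
+ */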
+
+#endif
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h
new file mode 100644
index 0000000..6e8424c
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h
@@ -0,0 +1,45 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_MPO_H_
+#define MM_JPEG_MPO_H_
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "qmpo.h"
+
+#define TRUE 1
+#define FALSE 0
+
+extern int mm_jpeg_mpo_compose(mm_jpeg_mpo_info_t *mpo_info);
+
+extern int get_mpo_size(mm_jpeg_output_t jpeg_buffer[MM_JPEG_MAX_MPO_IMAGES],
+ int num_of_images);
+
+#endif
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
new file mode 100644
index 0000000..19a8b29
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -0,0 +1,3788 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <math.h>
+#define PRCTL_H <SYSTEM_HEADER_PREFIX/prctl.h>
+#include PRCTL_H
+
+#ifdef LOAD_ADSP_RPC_LIB
+#include <dlfcn.h>
+#include <stdlib.h>
+#endif
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+#ifdef LIB2D_ROTATION_ENABLE
+#include "mm_lib2d.h"
+#endif
+
+#define ENCODING_MODE_PARALLEL 1
+
+#define META_KEYFILE QCAMERA_DUMP_FRM_LOCATION"metadata.key"
+
+/**
+ * minimal resolution needed for normal mode of ops
+ */
+#define MM_JPEG_MIN_NOM_RESOLUTION 7680000 /*8MP*/
+
+#ifdef MM_JPEG_USE_PIPELINE
+#undef MM_JPEG_CONCURRENT_SESSIONS_COUNT
+#define MM_JPEG_CONCURRENT_SESSIONS_COUNT 1
+#endif
+
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_EVENTTYPE eEvent,
+ OMX_U32 nData1,
+ OMX_U32 nData2,
+ OMX_PTR pEventData);
+
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session);
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_dst_ptr(
+ mm_jpeg_queue_t* queue, void * dst_ptr);
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session);
+
+/** mm_jpeg_get_comp_name:
+ *
+ * Arguments:
+ * None
+ *
+ * Return:
+ * Encoder component name
+ *
+ * Description:
+ * Get the name of omx component to be used for jpeg encoding
+ *
+ **/
+inline char* mm_jpeg_get_comp_name()
+{
+#ifdef MM_JPEG_USE_PIPELINE
+ return "OMX.qcom.image.jpeg.encoder_pipeline";
+#else
+ return "OMX.qcom.image.jpeg.encoder";
+#endif
+}
+
+/** mm_jpeg_session_send_buffers:
+ *
+ * Arguments:
+ * @data: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Send the buffers to OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_send_buffers(void *data)
+{
+ uint32_t i = 0;
+ mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ QOMX_BUFFER_INFO lbuffer_info;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+ memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+
+ if (p_session->lib2d_rotation_flag) {
+ for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+ lbuffer_info.fd = (OMX_U32)p_session->src_rot_main_buf[i].fd;
+ LOGD("Source rot buffer %d", i);
+ ret = OMX_UseBuffer(p_session->omx_handle,
+ &(p_session->p_in_rot_omx_buf[i]), 0,
+ &lbuffer_info, p_session->src_rot_main_buf[i].buf_size,
+ p_session->src_rot_main_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ } else {
+ for (i = 0; i < p_params->num_src_bufs; i++) {
+ LOGD("Source buffer %d", i);
+ lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+ ret = OMX_UseBuffer(p_session->omx_handle,
+ &(p_session->p_in_omx_buf[i]), 0,
+ &lbuffer_info, p_params->src_main_buf[i].buf_size,
+ p_params->src_main_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ }
+
+ if (p_session->params.encode_thumbnail) {
+ if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+ for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+ LOGD("Source rot buffer thumb %d", i);
+ lbuffer_info.fd = (OMX_U32)p_session->src_rot_main_buf[i].fd;
+ ret = OMX_UseBuffer(p_session->omx_handle,
+ &(p_session->p_in_rot_omx_thumb_buf[i]), 2,
+ &lbuffer_info, p_session->src_rot_main_buf[i].buf_size,
+ p_session->src_rot_main_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ } else {
+ for (i = 0; i < p_params->num_tmb_bufs; i++) {
+ LOGD("Source tmb buffer %d", i);
+ lbuffer_info.fd = (OMX_U32)p_params->src_thumb_buf[i].fd;
+ ret = OMX_UseBuffer(p_session->omx_handle,
+ &(p_session->p_in_omx_thumb_buf[i]), 2,
+ &lbuffer_info, p_params->src_thumb_buf[i].buf_size,
+ p_params->src_thumb_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ }
+ }
+
+ for (i = 0; i < p_params->num_dst_bufs; i++) {
+ LOGD("Dest buffer %d", i);
+ ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+ 1, NULL, p_params->dest_buf[i].buf_size,
+ p_params->dest_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+ }
+
+ return ret;
+}
+
+
+/** mm_jpeg_session_free_buffers:
+ *
+ * Arguments:
+ * @data: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Free the buffers from OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_free_buffers(void *data)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ uint32_t i = 0;
+ mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+
+ if (p_session->lib2d_rotation_flag) {
+ for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+ LOGD("Source rot buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 0,
+ p_session->p_in_rot_omx_buf[i]);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ } else {
+ for (i = 0; i < p_params->num_src_bufs; i++) {
+ LOGD("Source buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 0,
+ p_session->p_in_omx_buf[i]);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ }
+
+ if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+ for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+ LOGD("Source rot buffer thumb %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 2,
+ p_session->p_in_rot_omx_thumb_buf[i]);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ } else {
+ for (i = 0; i < p_params->num_tmb_bufs; i++) {
+ LOGD("Source buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 2,
+ p_session->p_in_omx_thumb_buf[i]);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+ }
+
+ for (i = 0; i < p_params->num_dst_bufs; i++) {
+ LOGD("Dest buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+ }
+ return ret;
+}
+
+
+
+
+/** mm_jpeg_session_change_state:
+ *
+ * Arguments:
+ * @p_session: job session
+ * @new_state: new state to be transitioned to
+ * @p_exec: transition function
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * This method is used for state transition
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+ OMX_STATETYPE new_state,
+ mm_jpeg_transition_func_t p_exec)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMX_STATETYPE current_state;
+ LOGD("new_state %d p_exec %p",
+ new_state, p_exec);
+
+
+ pthread_mutex_lock(&p_session->lock);
+
+ ret = OMX_GetState(p_session->omx_handle, &current_state);
+
+ if (ret) {
+ pthread_mutex_unlock(&p_session->lock);
+ return ret;
+ }
+
+ if (current_state == new_state) {
+ pthread_mutex_unlock(&p_session->lock);
+ return OMX_ErrorNone;
+ }
+
+ p_session->state_change_pending = OMX_TRUE;
+ pthread_mutex_unlock(&p_session->lock);
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+ new_state, NULL);
+ pthread_mutex_lock(&p_session->lock);
+ if (ret) {
+ LOGE("Error %d", ret);
+ pthread_mutex_unlock(&p_session->lock);
+ return OMX_ErrorIncorrectStateTransition;
+ }
+ if ((OMX_ErrorNone != p_session->error_flag) &&
+ (OMX_ErrorOverflow != p_session->error_flag)) {
+ LOGE("Error %d", p_session->error_flag);
+ pthread_mutex_unlock(&p_session->lock);
+ return p_session->error_flag;
+ }
+ if (p_exec) {
+ ret = p_exec(p_session);
+ if (ret) {
+ LOGE("Error %d", ret);
+ pthread_mutex_unlock(&p_session->lock);
+ return ret;
+ }
+ }
+ if (p_session->state_change_pending) {
+ LOGL("before wait");
+ pthread_cond_wait(&p_session->cond, &p_session->lock);
+ LOGL("after wait");
+ }
+ pthread_mutex_unlock(&p_session->lock);
+ return ret;
+}
+
+/** mm_jpeg_session_create:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error types
+ *
+ * Description:
+ * Create a jpeg encode session
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_create(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+ pthread_mutex_init(&p_session->lock, NULL);
+ pthread_cond_init(&p_session->cond, NULL);
+ cirq_reset(&p_session->cb_q);
+ p_session->state_change_pending = OMX_FALSE;
+ p_session->abort_state = MM_JPEG_ABORT_NONE;
+ p_session->error_flag = OMX_ErrorNone;
+ p_session->ebd_count = 0;
+ p_session->fbd_count = 0;
+ p_session->encode_pid = -1;
+ p_session->config = OMX_FALSE;
+ p_session->exif_count_local = 0;
+ p_session->auto_out_buf = OMX_FALSE;
+
+ p_session->omx_callbacks.EmptyBufferDone = mm_jpeg_ebd;
+ p_session->omx_callbacks.FillBufferDone = mm_jpeg_fbd;
+ p_session->omx_callbacks.EventHandler = mm_jpeg_event_handler;
+
+ p_session->thumb_from_main = 0;
+#ifdef MM_JPEG_USE_PIPELINE
+ p_session->thumb_from_main = !p_session->params.thumb_from_postview;
+#endif
+
+ rc = OMX_GetHandle(&p_session->omx_handle,
+ mm_jpeg_get_comp_name(),
+ (void *)p_session,
+ &p_session->omx_callbacks);
+ if (OMX_ErrorNone != rc) {
+ LOGE("OMX_GetHandle failed (%d)", rc);
+ return rc;
+ }
+
+ my_obj->num_sessions++;
+
+ return rc;
+}
+
+
+
+/** mm_jpeg_session_destroy:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * none
+ *
+ * Description:
+ * Destroy a jpeg encode session
+ *
+ **/
+void mm_jpeg_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+ OMX_STATETYPE state;
+ uint32_t i;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+ LOGD("E");
+ if (NULL == p_session->omx_handle) {
+ LOGE("invalid handle");
+ return;
+ }
+
+ rc = OMX_GetState(p_session->omx_handle, &state);
+
+ //Check state before state transition
+ if ((state == OMX_StateExecuting) || (state == OMX_StatePause)) {
+ rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+ if (rc) {
+ LOGE("Error");
+ }
+ }
+
+ rc = OMX_GetState(p_session->omx_handle, &state);
+
+ if (state == OMX_StateIdle) {
+ rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+ mm_jpeg_session_free_buffers);
+ if (rc) {
+ LOGE("Error");
+ }
+ }
+
+ if (p_session->lib2d_rotation_flag) {
+ for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+ if (p_session->src_rot_ion_buffer[i].addr) {
+ buffer_deallocate(&p_session->src_rot_ion_buffer[i]);
+ }
+ }
+ }
+
+ /* If current session is the session in progress
+ set session in progress pointer to null*/
+ p_session->config = OMX_FALSE;
+ if (my_obj->p_session_inprogress == p_session) {
+ my_obj->p_session_inprogress = NULL;
+ }
+
+ rc = OMX_FreeHandle(p_session->omx_handle);
+ if (0 != rc) {
+ LOGE("OMX_FreeHandle failed (%d)", rc);
+ }
+ p_session->omx_handle = NULL;
+
+ pthread_mutex_destroy(&p_session->lock);
+ pthread_cond_destroy(&p_session->cond);
+
+ if (NULL != p_session->meta_enc_key) {
+ free(p_session->meta_enc_key);
+ p_session->meta_enc_key = NULL;
+ }
+
+ my_obj->num_sessions--;
+
+ // Destroy next session
+ if (p_session->next_session) {
+ mm_jpeg_session_destroy(p_session->next_session);
+ }
+
+ LOGD("Session destroy successful. X");
+}
+
+
+
+/** mm_jpeg_session_config_main_buffer_offset:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure the buffer offsets
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_buffer_offset(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = 0;
+ OMX_INDEXTYPE buffer_index;
+ QOMX_YUV_FRAME_INFO frame_info;
+ size_t totalSize = 0;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+ mm_jpeg_buf_t *p_src_buf =
+ &p_params->src_main_buf[0];
+
+ memset(&frame_info, 0x0, sizeof(QOMX_YUV_FRAME_INFO));
+
+ frame_info.cbcrStartOffset[0] = p_src_buf->offset.mp[0].len;
+ frame_info.cbcrStartOffset[1] = p_src_buf->offset.mp[1].len;
+ if (!p_session->lib2d_rotation_flag) {
+ frame_info.yOffset = p_src_buf->offset.mp[0].offset;
+ frame_info.cbcrOffset[0] = p_src_buf->offset.mp[1].offset;
+ frame_info.cbcrOffset[1] = p_src_buf->offset.mp[2].offset;
+ }
+ totalSize = p_src_buf->buf_size;
+
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME, &buffer_index);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ LOGD("yOffset = %d, cbcrOffset = (%d %d), totalSize = %zd,"
+ "cbcrStartOffset = (%d %d)",
+ (int)frame_info.yOffset,
+ (int)frame_info.cbcrOffset[0],
+ (int)frame_info.cbcrOffset[1],
+ totalSize,
+ (int)frame_info.cbcrStartOffset[0],
+ (int)frame_info.cbcrStartOffset[1]);
+
+ rc = OMX_SetParameter(p_session->omx_handle, buffer_index, &frame_info);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return rc;
+}
+
+/** mm_jpeg_encoding_mode:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure the serial or parallel encoding
+ * mode
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_encoding_mode(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = 0;
+ OMX_INDEXTYPE indextype;
+ QOMX_ENCODING_MODE encoding_mode;
+
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_ENCODING_MODE_NAME, &indextype);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ if (ENCODING_MODE_PARALLEL) {
+ encoding_mode = OMX_Parallel_Encoding;
+ } else {
+ encoding_mode = OMX_Serial_Encoding;
+ }
+ LOGD("encoding mode = %d ",
+ (int)encoding_mode);
+ rc = OMX_SetParameter(p_session->omx_handle, indextype, &encoding_mode);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return rc;
+}
+
+/** mm_jpeg_get_speed:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * ops speed type for jpeg
+ *
+ * Description:
+ *       Select normal or high speed jpeg encoding based on
+ *       resolution and burst mode
+ *
+ **/
+QOMX_JPEG_SPEED_MODE mm_jpeg_get_speed(
+ mm_jpeg_job_session_t* p_session)
+{
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+ cam_dimension_t *p_dim = &p_params->main_dim.src_dim;
+ if (p_params->burst_mode ||
+ (MM_JPEG_MIN_NOM_RESOLUTION < (p_dim->width * p_dim->height))) {
+ return QOMX_JPEG_SPEED_MODE_HIGH;
+ }
+ return QOMX_JPEG_SPEED_MODE_NORMAL;
+}
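+
+/* For example: a 4000x3000 (12MP) source exceeds MM_JPEG_MIN_NOM_RESOLUTION
+ * (7,680,000 pixels), so high speed mode is chosen even outside burst mode,
+ * while a 2592x1944 (~5MP) non-burst snapshot stays in normal speed mode. */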
+
+/** mm_jpeg_speed_mode:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure normal or high speed jpeg
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_speed_mode(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = 0;
+ OMX_INDEXTYPE indextype;
+ QOMX_JPEG_SPEED jpeg_speed;
+
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_JPEG_SPEED_NAME, &indextype);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ jpeg_speed.speedMode = mm_jpeg_get_speed(p_session);
+ LOGH("speed %d", jpeg_speed.speedMode);
+
+ rc = OMX_SetParameter(p_session->omx_handle, indextype, &jpeg_speed);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return rc;
+}
+
+/** mm_jpeg_get_mem:
+ *
+ * Arguments:
+ * @p_out_buf : jpeg output buffer
+ * @p_jpeg_session: job session
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * gets the jpeg output buffer
+ *
+ **/
+static int32_t mm_jpeg_get_mem(
+ omx_jpeg_ouput_buf_t *p_out_buf, void* p_jpeg_session)
+{
+ int32_t rc = 0;
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *)p_jpeg_session;
+ mm_jpeg_encode_params_t *p_params = NULL;
+ mm_jpeg_encode_job_t *p_encode_job = NULL;
+
+ if (!p_session) {
+ LOGE("Invalid input");
+ return -1;
+ }
+ p_params = &p_session->params;
+ p_encode_job = &p_session->encode_job;
+ if (!p_params || !p_encode_job || !p_params->get_memory) {
+ LOGE("Invalid jpeg encode params");
+ return -1;
+ }
+ p_params->get_memory(p_out_buf);
+ p_encode_job->ref_count++;
+ p_encode_job->alloc_out_buffer = p_out_buf;
+ LOGD("ref_count %d p_out_buf %p",
+ p_encode_job->ref_count, p_out_buf);
+ return rc;
+}
+
+/** mm_jpeg_put_mem:
+ *
+ * Arguments:
+ * @p_jpeg_session: job session
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * releases the jpeg output buffer
+ *
+ **/
+static int32_t mm_jpeg_put_mem(void* p_jpeg_session)
+{
+ int32_t rc = 0;
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *)p_jpeg_session;
+ mm_jpeg_encode_params_t *p_params = NULL;
+ mm_jpeg_encode_job_t *p_encode_job = NULL;
+
+ if (!p_session) {
+ LOGE("Invalid input");
+ return -1;
+ }
+ p_params = &p_session->params;
+ p_encode_job = &p_session->encode_job;
+
+ if (!p_params->get_memory) {
+ LOGD("get_mem not defined, ignore put mem");
+ return 0;
+ }
+ if (!p_params || !p_encode_job || !p_params->put_memory) {
+ LOGE("Invalid jpeg encode params");
+ return -1;
+ }
+ if ((MM_JPEG_ABORT_NONE != p_session->abort_state) &&
+ p_encode_job->ref_count) {
+ omx_jpeg_ouput_buf_t *p_out_buf =
+ ( omx_jpeg_ouput_buf_t *) p_encode_job->alloc_out_buffer;
+ p_params->put_memory(p_out_buf);
+ p_encode_job->ref_count--;
+ p_encode_job->alloc_out_buffer = NULL;
+ } else if (p_encode_job->ref_count) {
+ p_encode_job->ref_count--;
+ } else {
+ LOGW("Buffer already released %d", p_encode_job->ref_count);
+ rc = -1;
+ }
+ LOGD("ref_count %d p_out_buf %p",
+ p_encode_job->ref_count, p_encode_job->alloc_out_buffer);
+ return rc;
+}
+
+/** mm_jpeg_mem_ops:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ *       Configure the get/put memory callbacks used for the
+ *       jpeg output buffer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_mem_ops(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = 0;
+ OMX_INDEXTYPE indextype;
+ QOMX_MEM_OPS mem_ops;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+ if (p_params->get_memory) {
+ mem_ops.get_memory = mm_jpeg_get_mem;
+ } else {
+ mem_ops.get_memory = NULL;
+ LOGH("HAL get_mem handler undefined");
+ }
+
+ mem_ops.psession = p_session;
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_MEM_OPS_NAME, &indextype);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ rc = OMX_SetParameter(p_session->omx_handle, indextype, &mem_ops);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return rc;
+}
+
+/** mm_jpeg_metadata:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Pass meta data
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_metadata(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+ OMX_INDEXTYPE indexType;
+ QOMX_METADATA lMeta;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_METADATA_NAME, &indexType);
+
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ lMeta.metadata = (OMX_U8 *)p_jobparams->p_metadata;
+ lMeta.metaPayloadSize = sizeof(*p_jobparams->p_metadata);
+ lMeta.mobicat_mask = p_jobparams->mobicat_mask;
+ lMeta.static_metadata = (OMX_U8 *)my_obj->jpeg_metadata;
+
+ rc = OMX_SetConfig(p_session->omx_handle, indexType, &lMeta);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return OMX_ErrorNone;
+}
+
+/** mm_jpeg_meta_enc_key:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Pass metadata encrypt key
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_meta_enc_key(
+ mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+ OMX_INDEXTYPE indexType;
+ QOMX_META_ENC_KEY lKey;
+
+ lKey.metaKey = p_session->meta_enc_key;
+ lKey.keyLen = p_session->meta_enc_keylen;
+
+ if ((!lKey.metaKey) || (!lKey.keyLen)){
+ LOGD("Key is invalid");
+ return OMX_ErrorNone;
+ }
+
+ rc = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_META_ENC_KEY_NAME, &indexType);
+
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+
+ rc = OMX_SetConfig(p_session->omx_handle, indexType, &lKey);
+ if (rc != OMX_ErrorNone) {
+ LOGE("Failed");
+ return rc;
+ }
+ return OMX_ErrorNone;
+}
+
+/** map_jpeg_format:
+ *
+ * Arguments:
+ * @color_fmt: color format
+ *
+ * Return:
+ * OMX color format
+ *
+ * Description:
+ * Map mmjpeg color format to OMX color format
+ *
+ **/
+int map_jpeg_format(mm_jpeg_color_format color_fmt)
+{
+ switch (color_fmt) {
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+ return (int)OMX_COLOR_FormatYUV420SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+ return (int)OMX_COLOR_FormatYUV422SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+ return (int)OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar;
+ case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+ return (int)OMX_COLOR_FormatMonochrome;
+ default:
+ LOGW("invalid format %d", color_fmt);
+ return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+ }
+}
+
+/** mm_jpeg_get_imgfmt_from_colorfmt:
+ *
+ * Arguments:
+ * @color_fmt: color format
+ *
+ * Return:
+ * cam format
+ *
+ * Description:
+ * Get camera image format from color format
+ *
+ **/
+cam_format_t mm_jpeg_get_imgfmt_from_colorfmt
+ (mm_jpeg_color_format color_fmt)
+{
+ switch (color_fmt) {
+ case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+ return CAM_FORMAT_Y_ONLY;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+ return CAM_FORMAT_YUV_420_NV21;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+ return CAM_FORMAT_YUV_420_NV12;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+ return CAM_FORMAT_YUV_422_NV61;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+ return CAM_FORMAT_YUV_422_NV16;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+ return CAM_FORMAT_YUV_444_NV42;
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+ return CAM_FORMAT_YUV_444_NV24;
+ default:
+ return CAM_FORMAT_Y_ONLY;
+ }
+}
+
+/** mm_jpeg_session_config_ports:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+ OMX_CONFIG_ROTATIONTYPE rotate;
+
+ mm_jpeg_buf_t *p_src_buf =
+ &p_params->src_main_buf[0];
+
+ p_session->inputPort.nPortIndex = 0;
+ p_session->outputPort.nPortIndex = 1;
+ p_session->inputTmbPort.nPortIndex = 2;
+
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputTmbPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->outputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ if (p_session->lib2d_rotation_flag &&
+ ((p_session->params.rotation == 90) ||
+ (p_session->params.rotation == 270))) {
+ p_session->inputPort.format.image.nFrameWidth =
+ (OMX_U32)p_params->main_dim.src_dim.height;
+ p_session->inputPort.format.image.nFrameHeight =
+ (OMX_U32)p_params->main_dim.src_dim.width;
+ p_session->inputPort.format.image.nStride =
+ p_src_buf->offset.mp[0].scanline;
+ p_session->inputPort.format.image.nSliceHeight =
+ (OMX_U32)p_src_buf->offset.mp[0].stride;
+ } else {
+ p_session->inputPort.format.image.nFrameWidth =
+ (OMX_U32)p_params->main_dim.src_dim.width;
+ p_session->inputPort.format.image.nFrameHeight =
+ (OMX_U32)p_params->main_dim.src_dim.height;
+ p_session->inputPort.format.image.nStride =
+ p_src_buf->offset.mp[0].stride;
+ p_session->inputPort.format.image.nSliceHeight =
+ (OMX_U32)p_src_buf->offset.mp[0].scanline;
+ }
+
+ p_session->inputPort.format.image.eColorFormat =
+ map_jpeg_format(p_params->color_format);
+ p_session->inputPort.nBufferSize =
+ p_params->src_main_buf[0/*p_jobparams->src_index*/].buf_size;
+
+ if (p_session->lib2d_rotation_flag) {
+ p_session->inputPort.nBufferCountActual =
+ (OMX_U32)p_session->num_src_rot_bufs;
+ } else {
+ p_session->inputPort.nBufferCountActual =
+ (OMX_U32)p_params->num_src_bufs;
+ }
+
+ ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ if (p_session->params.encode_thumbnail) {
+ mm_jpeg_buf_t *p_tmb_buf =
+ &p_params->src_thumb_buf[0];
+ if ((p_session->lib2d_rotation_flag && p_session->thumb_from_main) &&
+ ((p_session->params.rotation == 90) ||
+ (p_session->params.rotation == 270))) {
+ p_session->inputTmbPort.format.image.nFrameWidth =
+ (OMX_U32)p_params->thumb_dim.src_dim.height;
+ p_session->inputTmbPort.format.image.nFrameHeight =
+ (OMX_U32)p_params->thumb_dim.src_dim.width;
+ p_session->inputTmbPort.format.image.nStride =
+ p_tmb_buf->offset.mp[0].scanline;
+ p_session->inputTmbPort.format.image.nSliceHeight =
+ (OMX_U32)p_tmb_buf->offset.mp[0].stride;
+ } else {
+ p_session->inputTmbPort.format.image.nFrameWidth =
+ (OMX_U32)p_params->thumb_dim.src_dim.width;
+ p_session->inputTmbPort.format.image.nFrameHeight =
+ (OMX_U32)p_params->thumb_dim.src_dim.height;
+ p_session->inputTmbPort.format.image.nStride =
+ p_tmb_buf->offset.mp[0].stride;
+ p_session->inputTmbPort.format.image.nSliceHeight =
+ (OMX_U32)p_tmb_buf->offset.mp[0].scanline;
+ }
+
+ p_session->inputTmbPort.format.image.eColorFormat =
+ map_jpeg_format(p_params->thumb_color_format);
+ p_session->inputTmbPort.nBufferSize =
+ p_params->src_thumb_buf[0].buf_size;
+
+ if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+ p_session->inputTmbPort.nBufferCountActual =
+ (OMX_U32)p_session->num_src_rot_bufs;
+ } else {
+ p_session->inputTmbPort.nBufferCountActual =
+ (OMX_U32)p_params->num_tmb_bufs;
+ }
+
+ ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputTmbPort);
+
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ // Enable thumbnail port
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+ p_session->inputTmbPort.nPortIndex, NULL);
+
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+ } else {
+ // Disable thumbnail port
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+ p_session->inputTmbPort.nPortIndex, NULL);
+
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+ }
+
+ p_session->outputPort.nBufferSize =
+ p_params->dest_buf[0].buf_size;
+ p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+ ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->outputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ /* set rotation */
+ memset(&rotate, 0, sizeof(rotate));
+ rotate.nPortIndex = 1;
+
+ if (p_session->lib2d_rotation_flag) {
+ rotate.nRotation = 0;
+ } else {
+ rotate.nRotation = (OMX_S32)p_params->rotation;
+ }
+
+ ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+ &rotate);
+ if (OMX_ErrorNone != ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ LOGD("Set rotation to %d at port_idx = %d",
+ (int)p_params->rotation, (int)rotate.nPortIndex);
+
+ return ret;
+}
+
+/** mm_jpeg_update_thumbnail_crop
+ *
+ * Arguments:
+ * @p_thumb_dim: thumbnail dimension
+ * @crop_width : flag indicating if width needs to be cropped
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Updates thumbnail crop aspect ratio based on
+ * thumbnail destination aspect ratio.
+ *
+ */
+OMX_ERRORTYPE mm_jpeg_update_thumbnail_crop(mm_jpeg_dim_t *p_thumb_dim,
+ uint8_t crop_width)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ int32_t cropped_width = 0, cropped_height = 0;
+
+ if (crop_width) {
+ // Keep height constant
+ cropped_height = p_thumb_dim->crop.height;
+ cropped_width = floor((cropped_height * p_thumb_dim->dst_dim.width) /
+ p_thumb_dim->dst_dim.height);
+ if (cropped_width % 2) {
+ cropped_width -= 1;
+ }
+ } else {
+ // Keep width constant
+ cropped_width = p_thumb_dim->crop.width;
+ cropped_height = floor((cropped_width * p_thumb_dim->dst_dim.height) /
+ p_thumb_dim->dst_dim.width);
+ if (cropped_height % 2) {
+ cropped_height -= 1;
+ }
+ }
+ p_thumb_dim->crop.left = p_thumb_dim->crop.left +
+ floor((p_thumb_dim->crop.width - cropped_width) / 2);
+ if (p_thumb_dim->crop.left % 2) {
+ p_thumb_dim->crop.left -= 1;
+ }
+ p_thumb_dim->crop.top = p_thumb_dim->crop.top +
+ floor((p_thumb_dim->crop.height - cropped_height) / 2);
+ if (p_thumb_dim->crop.top % 2) {
+ p_thumb_dim->crop.top -= 1;
+ }
+ p_thumb_dim->crop.width = cropped_width;
+ p_thumb_dim->crop.height = cropped_height;
+
+ LOGH("New thumbnail crop: left %d, top %d, crop width %d,"
+ " crop height %d", p_thumb_dim->crop.left,
+ p_thumb_dim->crop.top, p_thumb_dim->crop.width,
+ p_thumb_dim->crop.height);
+
+ return ret;
+}
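+
+/*
+ * Worked example: a 1440x1080 (4:3) thumbnail crop with a 1920x1080 (16:9)
+ * destination is called with crop_width = 0 (width kept constant), giving
+ * cropped_height = floor(1440 * 1080 / 1920) = 810. Starting from top = 0,
+ * top becomes floor((1080 - 810) / 2) = 135, rounded down to 134 to stay
+ * even, so the crop ends up 1440x810 at (0, 134), i.e. 16:9.
+ */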
+
+/** mm_jpeg_session_config_thumbnail:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ *       Configure the thumbnail port, crop and scaling
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_thumbnail(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ QOMX_THUMBNAIL_INFO thumbnail_info;
+ OMX_INDEXTYPE thumb_indextype;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ mm_jpeg_dim_t *p_thumb_dim = &p_jobparams->thumb_dim;
+ mm_jpeg_dim_t *p_main_dim = &p_jobparams->main_dim;
+ QOMX_YUV_FRAME_INFO *p_frame_info = &thumbnail_info.tmbOffset;
+ mm_jpeg_buf_t *p_tmb_buf = &p_params->src_thumb_buf[p_jobparams->thumb_index];
+
+ LOGH("encode_thumbnail %u",
+ p_params->encode_thumbnail);
+ if (OMX_FALSE == p_params->encode_thumbnail) {
+ return ret;
+ }
+
+ if ((p_thumb_dim->dst_dim.width == 0) || (p_thumb_dim->dst_dim.height == 0)) {
+ LOGE("Error invalid output dim for thumbnail");
+ return OMX_ErrorBadParameter;
+ }
+
+ if ((p_thumb_dim->src_dim.width == 0) || (p_thumb_dim->src_dim.height == 0)) {
+ LOGE("Error invalid input dim for thumbnail");
+ return OMX_ErrorBadParameter;
+ }
+
+ if ((p_thumb_dim->crop.width == 0) || (p_thumb_dim->crop.height == 0)) {
+ p_thumb_dim->crop.width = p_thumb_dim->src_dim.width;
+ p_thumb_dim->crop.height = p_thumb_dim->src_dim.height;
+ }
+
+ /* check crop boundary */
+ if ((p_thumb_dim->crop.width + p_thumb_dim->crop.left > p_thumb_dim->src_dim.width) ||
+ (p_thumb_dim->crop.height + p_thumb_dim->crop.top > p_thumb_dim->src_dim.height)) {
+ LOGE("invalid crop boundary (%d, %d) offset (%d, %d) out of (%d, %d)",
+ p_thumb_dim->crop.width,
+ p_thumb_dim->crop.height,
+ p_thumb_dim->crop.left,
+ p_thumb_dim->crop.top,
+ p_thumb_dim->src_dim.width,
+ p_thumb_dim->src_dim.height);
+ return OMX_ErrorBadParameter;
+ }
+
+ memset(&thumbnail_info, 0x0, sizeof(QOMX_THUMBNAIL_INFO));
+ ret = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_THUMBNAIL_NAME,
+ &thumb_indextype);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+
+ /* fill thumbnail info */
+ thumbnail_info.scaling_enabled = 1;
+ thumbnail_info.input_width = (OMX_U32)p_thumb_dim->src_dim.width;
+ thumbnail_info.input_height = (OMX_U32)p_thumb_dim->src_dim.height;
+ thumbnail_info.rotation = (OMX_U32)p_params->thumb_rotation;
+ thumbnail_info.quality = (OMX_U32)p_params->thumb_quality;
+ thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.width;
+ thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.height;
+
+ if (p_session->thumb_from_main) {
+
+ if (p_session->lib2d_rotation_flag) {
+ thumbnail_info.rotation = 0;
+ } else {
+ if ((p_session->params.thumb_rotation == 90 ||
+ p_session->params.thumb_rotation == 270) &&
+ (p_session->params.rotation == 0 ||
+ p_session->params.rotation == 180)) {
+
+ thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.height;
+ thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.width;
+ thumbnail_info.rotation = p_session->params.rotation;
+ }
+ }
+
+ //Thumb FOV should be within main image FOV
+ if (p_thumb_dim->crop.left < p_main_dim->crop.left) {
+ p_thumb_dim->crop.left = p_main_dim->crop.left;
+ }
+
+ if (p_thumb_dim->crop.top < p_main_dim->crop.top) {
+ p_thumb_dim->crop.top = p_main_dim->crop.top;
+ }
+
+ while ((p_thumb_dim->crop.left + p_thumb_dim->crop.width) >
+ (p_main_dim->crop.left + p_main_dim->crop.width)) {
+ if (p_thumb_dim->crop.left == p_main_dim->crop.left) {
+ p_thumb_dim->crop.width = p_main_dim->crop.width;
+ } else {
+ p_thumb_dim->crop.left = p_main_dim->crop.left;
+ }
+ }
+
+ while ((p_thumb_dim->crop.top + p_thumb_dim->crop.height) >
+ (p_main_dim->crop.top + p_main_dim->crop.height)) {
+ if (p_thumb_dim->crop.top == p_main_dim->crop.top) {
+ p_thumb_dim->crop.height = p_main_dim->crop.height;
+ } else {
+ p_thumb_dim->crop.top = p_main_dim->crop.top;
+ }
+ }
+ } else if ((p_thumb_dim->dst_dim.width > p_thumb_dim->src_dim.width) ||
+ (p_thumb_dim->dst_dim.height > p_thumb_dim->src_dim.height)) {
+ LOGE("Incorrect thumbnail dim %dx%d resetting to %dx%d", p_thumb_dim->dst_dim.width,
+ p_thumb_dim->dst_dim.height, p_thumb_dim->src_dim.width,
+ p_thumb_dim->src_dim.height);
+ thumbnail_info.output_width = (OMX_U32)p_thumb_dim->src_dim.width;
+ thumbnail_info.output_height = (OMX_U32)p_thumb_dim->src_dim.height;
+ }
+
+  // If the thumbnail crop aspect ratio and the thumbnail destination
+  // aspect ratio differ, adjust the thumbnail crop
+ double thumbcrop_aspect_ratio = (double)p_thumb_dim->crop.width /
+ (double)p_thumb_dim->crop.height;
+ double thumbdst_aspect_ratio = (double)p_thumb_dim->dst_dim.width /
+ (double)p_thumb_dim->dst_dim.height;
+ if ((thumbdst_aspect_ratio - thumbcrop_aspect_ratio) >
+ ASPECT_TOLERANCE) {
+ mm_jpeg_update_thumbnail_crop(p_thumb_dim, 0);
+ } else if ((thumbcrop_aspect_ratio - thumbdst_aspect_ratio) >
+ ASPECT_TOLERANCE) {
+ mm_jpeg_update_thumbnail_crop(p_thumb_dim, 1);
+ }
+
+ // Fill thumbnail crop info
+ thumbnail_info.crop_info.nWidth = (OMX_U32)p_thumb_dim->crop.width;
+ thumbnail_info.crop_info.nHeight = (OMX_U32)p_thumb_dim->crop.height;
+ thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
+ thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
+
+ memset(p_frame_info, 0x0, sizeof(*p_frame_info));
+
+ p_frame_info->cbcrStartOffset[0] = p_tmb_buf->offset.mp[0].len;
+ p_frame_info->cbcrStartOffset[1] = p_tmb_buf->offset.mp[1].len;
+ p_frame_info->yOffset = p_tmb_buf->offset.mp[0].offset;
+ p_frame_info->cbcrOffset[0] = p_tmb_buf->offset.mp[1].offset;
+ p_frame_info->cbcrOffset[1] = p_tmb_buf->offset.mp[2].offset;
+
+ if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+ p_frame_info->yOffset = 0;
+ p_frame_info->cbcrOffset[0] = 0;
+ p_frame_info->cbcrOffset[1] = 0;
+ }
+
+ ret = OMX_SetConfig(p_session->omx_handle, thumb_indextype,
+ &thumbnail_info);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+
+ return ret;
+}
+
+/** mm_jpeg_session_config_main_crop:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure main image crop
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_crop(mm_jpeg_job_session_t *p_session)
+{
+ OMX_CONFIG_RECTTYPE rect_type_in, rect_type_out;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ mm_jpeg_dim_t *dim = &p_jobparams->main_dim;
+
+ if ((dim->crop.width == 0) || (dim->crop.height == 0)) {
+ dim->crop.width = dim->src_dim.width;
+ dim->crop.height = dim->src_dim.height;
+ }
+ /* error check first */
+ if ((dim->crop.width + dim->crop.left > dim->src_dim.width) ||
+ (dim->crop.height + dim->crop.top > dim->src_dim.height)) {
+ LOGE("invalid crop boundary (%d, %d) out of (%d, %d)",
+ dim->crop.width + dim->crop.left,
+ dim->crop.height + dim->crop.top,
+ dim->src_dim.width,
+ dim->src_dim.height);
+ return OMX_ErrorBadParameter;
+ }
+
+ memset(&rect_type_in, 0, sizeof(rect_type_in));
+ memset(&rect_type_out, 0, sizeof(rect_type_out));
+ rect_type_in.nPortIndex = 0;
+ rect_type_out.nPortIndex = 0;
+
+ if ((dim->src_dim.width != dim->crop.width) ||
+ (dim->src_dim.height != dim->crop.height) ||
+ (dim->src_dim.width != dim->dst_dim.width) ||
+ (dim->src_dim.height != dim->dst_dim.height)) {
+ /* Scaler information */
+ rect_type_in.nWidth = CEILING2(dim->crop.width);
+ rect_type_in.nHeight = CEILING2(dim->crop.height);
+ rect_type_in.nLeft = dim->crop.left;
+ rect_type_in.nTop = dim->crop.top;
+
+ if (dim->dst_dim.width && dim->dst_dim.height) {
+ rect_type_out.nWidth = (OMX_U32)dim->dst_dim.width;
+ rect_type_out.nHeight = (OMX_U32)dim->dst_dim.height;
+ }
+ }
+
+ ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonInputCrop,
+ &rect_type_in);
+ if (OMX_ErrorNone != ret) {
+ LOGE("Error");
+ return ret;
+ }
+
+ LOGH("OMX_IndexConfigCommonInputCrop w = %d, h = %d, l = %d, t = %d,"
+ " port_idx = %d",
+ (int)rect_type_in.nWidth, (int)rect_type_in.nHeight,
+ (int)rect_type_in.nLeft, (int)rect_type_in.nTop,
+ (int)rect_type_in.nPortIndex);
+
+ ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonOutputCrop,
+ &rect_type_out);
+ if (OMX_ErrorNone != ret) {
+ LOGE("Error");
+ return ret;
+ }
+ LOGD("OMX_IndexConfigCommonOutputCrop w = %d, h = %d,"
+ " port_idx = %d",
+ (int)rect_type_out.nWidth, (int)rect_type_out.nHeight,
+ (int)rect_type_out.nPortIndex);
+
+ return ret;
+}
+
+/** mm_jpeg_session_config_main:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+ /* config port */
+ LOGD("config port");
+ rc = mm_jpeg_session_config_ports(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config port failed");
+ return rc;
+ }
+
+ /* config buffer offset */
+ LOGD("config main buf offset");
+ rc = mm_jpeg_session_config_main_buffer_offset(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config buffer offset failed");
+ return rc;
+ }
+
+ /* set the encoding mode */
+ rc = mm_jpeg_encoding_mode(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config encoding mode failed");
+ return rc;
+ }
+
+ /* set the metadata encrypt key */
+ rc = mm_jpeg_meta_enc_key(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config session failed");
+ return rc;
+ }
+
+ /* set the mem ops */
+ rc = mm_jpeg_mem_ops(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config mem ops failed");
+ return rc;
+ }
+ /* set the jpeg speed mode */
+ rc = mm_jpeg_speed_mode(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config speed mode failed");
+ return rc;
+ }
+
+ return rc;
+}
+
+/** mm_jpeg_session_config_common:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure common parameters
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_common(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+ OMX_INDEXTYPE exif_idx;
+ OMX_CONFIG_ROTATIONTYPE rotate;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ QOMX_EXIF_INFO exif_info;
+
+ /* set rotation */
+ memset(&rotate, 0, sizeof(rotate));
+ rotate.nPortIndex = 1;
+
+ if (p_session->lib2d_rotation_flag) {
+ rotate.nRotation = 0;
+ } else {
+ rotate.nRotation = (OMX_S32)p_jobparams->rotation;
+ }
+
+ rc = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+ &rotate);
+ if (OMX_ErrorNone != rc) {
+ LOGE("Error %d", rc);
+ return rc;
+ }
+ LOGD("Set rotation to %d at port_idx = %d",
+ (int)p_jobparams->rotation, (int)rotate.nPortIndex);
+
+ /* Set Exif data*/
+ memset(&p_session->exif_info_local[0], 0, sizeof(p_session->exif_info_local));
+ rc = OMX_GetExtensionIndex(p_session->omx_handle, QOMX_IMAGE_EXT_EXIF_NAME,
+ &exif_idx);
+ if (OMX_ErrorNone != rc) {
+ LOGE("Error %d", rc);
+ return rc;
+ }
+
+ LOGD("Num of exif entries passed from HAL: %d",
+ (int)p_jobparams->exif_info.numOfEntries);
+ if (p_jobparams->exif_info.numOfEntries > 0) {
+ rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+ &p_jobparams->exif_info);
+ if (OMX_ErrorNone != rc) {
+ LOGE("Error %d", rc);
+ return rc;
+ }
+ }
+  /* parse additional exif data from the metadata */
+ exif_info.numOfEntries = 0;
+ exif_info.exif_data = &p_session->exif_info_local[0];
+ process_meta_data(p_jobparams->p_metadata, &exif_info,
+ &p_jobparams->cam_exif_params, p_jobparams->hal_version);
+ /* After Parse metadata */
+ p_session->exif_count_local = (int)exif_info.numOfEntries;
+
+ if (exif_info.numOfEntries > 0) {
+ /* set exif tags */
+ LOGD("exif tags from metadata count %d",
+ (int)exif_info.numOfEntries);
+
+ rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+ &exif_info);
+ if (OMX_ErrorNone != rc) {
+ LOGE("Error %d", rc);
+ return rc;
+ }
+ }
+
+ return rc;
+}
+
+/** mm_jpeg_session_abort:
+ *
+ * Arguments:
+ * @p_session: jpeg session
+ *
+ * Return:
+ * OMX_BOOL
+ *
+ * Description:
+ * Abort ongoing job
+ *
+ **/
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ int rc = 0;
+
+ LOGD("E");
+ pthread_mutex_lock(&p_session->lock);
+ if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+ pthread_mutex_unlock(&p_session->lock);
+ LOGH("**** ALREADY ABORTED");
+ return 0;
+ }
+ p_session->abort_state = MM_JPEG_ABORT_INIT;
+ if (OMX_TRUE == p_session->encoding) {
+ p_session->state_change_pending = OMX_TRUE;
+
+ LOGH("**** ABORTING");
+ pthread_mutex_unlock(&p_session->lock);
+
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+ OMX_StateIdle, NULL);
+
+ if (ret != OMX_ErrorNone) {
+ LOGE("OMX_SendCommand returned error %d", ret);
+ return 1;
+ }
+ rc = mm_jpegenc_destroy_job(p_session);
+ if (rc != 0) {
+ LOGE("Destroy job returned error %d", rc);
+ }
+
+ pthread_mutex_lock(&p_session->lock);
+ if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+ LOGL("before wait");
+ pthread_cond_wait(&p_session->cond, &p_session->lock);
+ }
+ LOGL("after wait");
+ }
+ p_session->abort_state = MM_JPEG_ABORT_DONE;
+
+ mm_jpeg_put_mem((void *)p_session);
+
+ pthread_mutex_unlock(&p_session->lock);
+
+ // Abort next session
+ if (p_session->next_session) {
+ mm_jpeg_session_abort(p_session->next_session);
+ }
+
+ LOGD("X");
+ return 0;
+}
+
+/** mm_jpeg_config_multi_image_info
+ *
+ * Arguments:
+ * @p_session: encode session
+ *
+ * Return: OMX_ERRORTYPE
+ *
+ * Description:
+ * Configure multi image parameters
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_config_multi_image_info(
+ mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ QOMX_JPEG_MULTI_IMAGE_INFO multi_image_info;
+ OMX_INDEXTYPE multi_image_index;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+ ret = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_MULTI_IMAGE_NAME, &multi_image_index);
+ if (ret) {
+ LOGE("Error getting multi image info extention index %d", ret);
+ return ret;
+ }
+ memset(&multi_image_info, 0, sizeof(multi_image_info));
+ if (p_jobparams->multi_image_info.type == MM_JPEG_TYPE_MPO) {
+ multi_image_info.image_type = QOMX_JPEG_IMAGE_TYPE_MPO;
+ } else {
+ multi_image_info.image_type = QOMX_JPEG_IMAGE_TYPE_JPEG;
+ }
+ multi_image_info.is_primary_image = p_jobparams->multi_image_info.is_primary;
+ multi_image_info.num_of_images = p_jobparams->multi_image_info.num_of_images;
+ multi_image_info.enable_metadata = p_jobparams->multi_image_info.enable_metadata;
+
+ ret = OMX_SetConfig(p_session->omx_handle, multi_image_index,
+ &multi_image_info);
+ if (ret) {
+ LOGE("Error setting multi image config");
+ return ret;
+ }
+ return ret;
+}
+
+/** mm_jpeg_configure_job_params
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ * Description:
+ * Configure the job specific params
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_configure_job_params(
+ mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMX_IMAGE_PARAM_QFACTORTYPE q_factor;
+ QOMX_WORK_BUFFER work_buffer;
+ OMX_INDEXTYPE work_buffer_index;
+ mm_jpeg_encode_params_t *p_params = &p_session->params;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ int i;
+
+ /* common config */
+ ret = mm_jpeg_session_config_common(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config common failed");
+ }
+
+ /* config Main Image crop */
+ LOGD("config main crop");
+ ret = mm_jpeg_session_config_main_crop(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config crop failed");
+ return ret;
+ }
+
+ /* set quality */
+ memset(&q_factor, 0, sizeof(q_factor));
+ q_factor.nPortIndex = 0;
+ q_factor.nQFactor = p_params->quality;
+ ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexParamQFactor, &q_factor);
+ LOGD("config QFactor: %d", (int)q_factor.nQFactor);
+ if (OMX_ErrorNone != ret) {
+ LOGE("Error setting Q factor %d", ret);
+ return ret;
+ }
+
+ /* config thumbnail */
+ ret = mm_jpeg_session_config_thumbnail(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config thumbnail img failed");
+ return ret;
+ }
+
+ //Pass the ION buffer to be used as o/p for HW
+ memset(&work_buffer, 0x0, sizeof(QOMX_WORK_BUFFER));
+ ret = OMX_GetExtensionIndex(p_session->omx_handle,
+ QOMX_IMAGE_EXT_WORK_BUFFER_NAME,
+ &work_buffer_index);
+ if (ret) {
+ LOGE("Error getting work buffer index %d", ret);
+ return ret;
+ }
+ work_buffer.fd = p_session->work_buffer.p_pmem_fd;
+ work_buffer.vaddr = p_session->work_buffer.addr;
+ work_buffer.length = (uint32_t)p_session->work_buffer.size;
+ LOGH("Work buffer info %d %p WorkBufSize: %d invalidate",
+ work_buffer.fd, work_buffer.vaddr, work_buffer.length);
+
+ buffer_invalidate(&p_session->work_buffer);
+
+ ret = OMX_SetConfig(p_session->omx_handle, work_buffer_index,
+ &work_buffer);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+
+ /* set metadata */
+ ret = mm_jpeg_metadata(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config makernote data failed");
+ return ret;
+ }
+
+ /* set QTable */
+ for (i = 0; i < QTABLE_MAX; i++) {
+ if (p_jobparams->qtable_set[i]) {
+ ret = OMX_SetConfig(p_session->omx_handle,
+ OMX_IndexParamQuantizationTable, &p_jobparams->qtable[i]);
+ if (OMX_ErrorNone != ret) {
+ LOGE("set QTable Error");
+ return ret;
+ }
+ }
+ }
+
+ /* Set multi image data*/
+ ret = mm_jpeg_config_multi_image_info(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config multi image data failed");
+ return ret;
+ }
+
+ return ret;
+}
+
+/** mm_jpeg_session_configure:
+ *
+ * Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ * Description:
+ * Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+ LOGD("E ");
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+ /* config main img */
+ ret = mm_jpeg_session_config_main(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config main img failed");
+ goto error;
+ }
+ ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+ mm_jpeg_session_send_buffers);
+ if (ret) {
+ LOGE("change state to idle failed %d", ret);
+ goto error;
+ }
+
+ ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+ NULL);
+ if (ret) {
+ LOGE("change state to executing failed %d", ret);
+ goto error;
+ }
+
+error:
+ LOGD("X ret %d", ret);
+ return ret;
+}
+
+
+
+
+
+
+/** mm_jpeg_session_encode:
+ *
+ * Arguments:
+ * @p_session: encode session
+ *
+ * Return:
+ * OMX_ERRORTYPE
+ *
+ * Description:
+ * Start the encoding
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_encode(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+ OMX_BUFFERHEADERTYPE *p_in_buf = NULL;
+ OMX_BUFFERHEADERTYPE *p_in_thumb_buf = NULL;
+
+ pthread_mutex_lock(&p_session->lock);
+ p_session->abort_state = MM_JPEG_ABORT_NONE;
+ p_session->encoding = OMX_FALSE;
+ pthread_mutex_unlock(&p_session->lock);
+
+ if (p_session->thumb_from_main) {
+ if (0 > p_jobparams->src_index) {
+ LOGE("Error");
+ ret = OMX_ErrorUnsupportedIndex;
+ goto error;
+ }
+ p_jobparams->thumb_index = (uint32_t)p_jobparams->src_index;
+ p_jobparams->thumb_dim.crop = p_jobparams->main_dim.crop;
+ }
+
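+  /* Only one session may own the OMX component configuration at a time:
+     if another session is still configured, move it back to Loaded and
+     free its buffers before configuring this one */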
+ if (OMX_FALSE == p_session->config) {
+ /* If another session in progress clear that sessions configuration */
+ if (my_obj->p_session_inprogress != NULL) {
+ OMX_STATETYPE state;
+ mm_jpeg_job_session_t *p_session_inprogress = my_obj->p_session_inprogress;
+
+ OMX_GetState(p_session_inprogress->omx_handle, &state);
+
+ //Check state before state transition
+ if ((state == OMX_StateExecuting) || (state == OMX_StatePause)) {
+ ret = mm_jpeg_session_change_state(p_session_inprogress,
+ OMX_StateIdle, NULL);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ }
+
+ OMX_GetState(p_session_inprogress->omx_handle, &state);
+
+ if (state == OMX_StateIdle) {
+ ret = mm_jpeg_session_change_state(p_session_inprogress,
+ OMX_StateLoaded, mm_jpeg_session_free_buffers);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ }
+ p_session_inprogress->config = OMX_FALSE;
+ my_obj->p_session_inprogress = NULL;
+ }
+
+ ret = mm_jpeg_session_configure(p_session);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ p_session->config = OMX_TRUE;
+ my_obj->p_session_inprogress = p_session;
+ }
+
+ ret = mm_jpeg_configure_job_params(p_session);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ pthread_mutex_lock(&p_session->lock);
+ p_session->encoding = OMX_TRUE;
+ pthread_mutex_unlock(&p_session->lock);
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+ if (p_session->lib2d_rotation_flag) {
+ p_in_buf = p_session->p_in_rot_omx_buf[p_jobparams->src_index];
+ } else {
+ p_in_buf = p_session->p_in_omx_buf[p_jobparams->src_index];
+ }
+
+#ifdef MM_JPEG_DUMP_INPUT
+ char filename[256];
+ snprintf(filename, sizeof(filename),
+ QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_int%d.yuv", p_session->ebd_count);
+ DUMP_TO_FILE(filename, p_in_buf->pBuffer, (size_t)p_in_buf->nAllocLen);
+#endif
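+  /* Queue the input buffer (and, below, the optional thumbnail input) to
+     the encoder; completion is reported via the EBD/FBD callbacks */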
+ ret = OMX_EmptyThisBuffer(p_session->omx_handle, p_in_buf);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+
+ if (p_session->params.encode_thumbnail) {
+
+ if (p_session->thumb_from_main &&
+ p_session->lib2d_rotation_flag) {
+ p_in_thumb_buf = p_session->p_in_rot_omx_thumb_buf[p_jobparams->thumb_index];
+ } else {
+ p_in_thumb_buf = p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index];
+ }
+
+#ifdef MM_JPEG_DUMP_INPUT
+ char thumb_filename[FILENAME_MAX];
+ snprintf(thumb_filename, sizeof(thumb_filename),
+ QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_int_t%d.yuv", p_session->ebd_count);
+    DUMP_TO_FILE(thumb_filename, p_in_thumb_buf->pBuffer,
+ (size_t)p_in_thumb_buf->nAllocLen);
+#endif
+ ret = OMX_EmptyThisBuffer(p_session->omx_handle, p_in_thumb_buf);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ }
+
+ ret = OMX_FillThisBuffer(p_session->omx_handle,
+ p_session->p_out_omx_buf[p_jobparams->dst_index]);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+ LOGD("X ");
+ return ret;
+}
+
+/** mm_jpeg_process_encoding_job:
+ *
+ * Arguments:
+ * @my_obj: jpeg client
+ * @job_node: job node
+ *
+ * Return:
+ * 0 for success -1 otherwise
+ *
+ * Description:
+ * Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_process_encoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = 0;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_job_session_t *p_session = NULL;
+ uint32_t buf_idx;
+
+ /* check if valid session */
+ p_session = mm_jpeg_get_session(my_obj, job_node->enc_info.job_id);
+ if (NULL == p_session) {
+ LOGE("invalid job id %x",
+ job_node->enc_info.job_id);
+ return -1;
+ }
+
+ LOGD("before dequeue session %d", ret);
+
+ /* dequeue available omx handle */
+ qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+ p_session = qdata.p;
+
+ if (NULL == p_session) {
+ LOGH("No available sessions %d", ret);
+ /* No available handles */
+ qdata.p = job_node;
+ mm_jpeg_queue_enq_head(&my_obj->job_mgr.job_queue, qdata);
+
+ LOGH("end enqueue %d", ret);
+ return rc;
+
+ }
+
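+  /* If the caller did not supply a destination buffer index, pick one from
+     the per-session output buffer pool (indices are stored 1-based, so 0
+     means the pool is empty) */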
+ p_session->auto_out_buf = OMX_FALSE;
+ if (job_node->enc_info.encode_job.dst_index < 0) {
+ /* dequeue available output buffer idx */
+ qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+ buf_idx = qdata.u32;
+
+ if (0U == buf_idx) {
+ LOGE("No available output buffers %d", ret);
+ return OMX_ErrorUndefined;
+ }
+
+ buf_idx--;
+
+ job_node->enc_info.encode_job.dst_index = (int32_t)buf_idx;
+ p_session->auto_out_buf = OMX_TRUE;
+ }
+
+  /* send encode cmd to OMX, queue job into ongoing queue */
+ qdata.p = job_node;
+ rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+ if (rc) {
+ LOGE("jpeg enqueue failed %d", ret);
+ goto error;
+ }
+
+ p_session->encode_job = job_node->enc_info.encode_job;
+ p_session->jobId = job_node->enc_info.job_id;
+ ret = mm_jpeg_session_encode(p_session);
+ if (ret) {
+ LOGE("encode session failed");
+ goto error;
+ }
+
+ LOGH("Success X ");
+ return rc;
+
+error:
+
+ if ((OMX_ErrorNone != ret) &&
+ (NULL != p_session->params.jpeg_cb)) {
+ p_session->job_status = JPEG_JOB_STATUS_ERROR;
+ LOGE("send jpeg error callback %d",
+ p_session->job_status);
+ p_session->params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ NULL,
+ p_session->params.userdata);
+ }
+
+ /*remove the job*/
+ mm_jpegenc_job_done(p_session);
+ LOGD("Error X ");
+
+ return rc;
+}
+
+
+
+/** mm_jpeg_jobmgr_thread:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * job manager thread main function
+ *
+ **/
+static void *mm_jpeg_jobmgr_thread(void *data)
+{
+ mm_jpeg_q_data_t qdata;
+ int rc = 0;
+ int running = 1;
+ uint32_t num_ongoing_jobs = 0;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj*)data;
+ mm_jpeg_job_cmd_thread_t *cmd_thread = &my_obj->job_mgr;
+ mm_jpeg_job_q_node_t* node = NULL;
+ prctl(PR_SET_NAME, (unsigned long)"mm_jpeg_thread", 0, 0, 0);
+
+ do {
+ do {
+ rc = cam_sem_wait(&cmd_thread->job_sem);
+ if (rc != 0 && errno != EINVAL) {
+ LOGE("cam_sem_wait error (%s)",
+ strerror(errno));
+ return NULL;
+ }
+ } while (rc != 0);
+
+ /* check ongoing q size */
+ num_ongoing_jobs = mm_jpeg_queue_get_size(&my_obj->ongoing_job_q);
+
+ LOGD("ongoing job %d %d", num_ongoing_jobs, MM_JPEG_CONCURRENT_SESSIONS_COUNT);
+ if (num_ongoing_jobs >= MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+ LOGE("ongoing job already reach max %d", num_ongoing_jobs);
+ continue;
+ }
+
+ pthread_mutex_lock(&my_obj->job_lock);
+ /* can go ahead with new work */
+ qdata = mm_jpeg_queue_deq(&cmd_thread->job_queue);
+ node = (mm_jpeg_job_q_node_t*)qdata.p;
+ if (node != NULL) {
+ switch (node->type) {
+ case MM_JPEG_CMD_TYPE_JOB:
+ rc = mm_jpeg_process_encoding_job(my_obj, node);
+ break;
+ case MM_JPEG_CMD_TYPE_DECODE_JOB:
+ rc = mm_jpegdec_process_decoding_job(my_obj, node);
+ break;
+ case MM_JPEG_CMD_TYPE_EXIT:
+ default:
+ /* free node */
+ free(node);
+ /* set running flag to false */
+ running = 0;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ } while (running);
+ return NULL;
+}
+
+/** mm_jpeg_jobmgr_thread_launch:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * launches the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj)
+{
+ int32_t rc = 0;
+ mm_jpeg_job_cmd_thread_t *job_mgr = &my_obj->job_mgr;
+
+ cam_sem_init(&job_mgr->job_sem, 0);
+ mm_jpeg_queue_init(&job_mgr->job_queue);
+
+ /* launch the thread */
+ pthread_create(&job_mgr->pid,
+ NULL,
+ mm_jpeg_jobmgr_thread,
+ (void *)my_obj);
+ pthread_setname_np(job_mgr->pid, "CAM_jpeg_jobmgr");
+ return rc;
+}
+
+/** mm_jpeg_jobmgr_thread_release:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Releases the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = 0;
+ mm_jpeg_job_cmd_thread_t * cmd_thread = &my_obj->job_mgr;
+ mm_jpeg_job_q_node_t* node =
+ (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_jpeg_job_q_node_t");
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+ node->type = MM_JPEG_CMD_TYPE_EXIT;
+
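+  /* Post an EXIT command and wake the job manager so its thread can
+     terminate, then join it below */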
+ qdata.p = node;
+ mm_jpeg_queue_enq(&cmd_thread->job_queue, qdata);
+ cam_sem_post(&cmd_thread->job_sem);
+
+ /* wait until cmd thread exits */
+ if (pthread_join(cmd_thread->pid, NULL) != 0) {
+ LOGD("pthread dead already");
+ }
+ mm_jpeg_queue_deinit(&cmd_thread->job_queue);
+
+ cam_sem_destroy(&cmd_thread->job_sem);
+ memset(cmd_thread, 0, sizeof(mm_jpeg_job_cmd_thread_t));
+ return rc;
+}
+
+/** mm_jpeg_alloc_workbuffer:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @work_bufs_need: number of work buffers required
+ * @work_buf_size: size of the work buffer
+ *
+ * Return:
+ * greater or equal to 0 for success else failure
+ *
+ * Description:
+ * Allocates work buffer
+ *
+ **/
+int32_t mm_jpeg_alloc_workbuffer(mm_jpeg_obj *my_obj,
+ uint32_t work_bufs_need,
+ uint32_t work_buf_size)
+{
+ int32_t rc = 0;
+ uint32_t i;
+ LOGH("work_bufs_need %d work_buf_cnt %d",
+ work_bufs_need, my_obj->work_buf_cnt);
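+  /* Allocate only the buffers beyond the current work_buf_cnt; rc ends up
+     holding the index of the last buffer allocated so the caller can pick
+     its work buffer from ionBuffer[rc] */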
+ for (i = my_obj->work_buf_cnt; i < work_bufs_need; i++) {
+ my_obj->ionBuffer[i].size = CEILING32(work_buf_size);
+ LOGH("Max picture size %d x %d, WorkBufSize = %zu",
+ my_obj->max_pic_w, my_obj->max_pic_h, my_obj->ionBuffer[i].size);
+ my_obj->ionBuffer[i].addr = (uint8_t *)buffer_allocate(&my_obj->ionBuffer[i], 1);
+ if (NULL == my_obj->ionBuffer[i].addr) {
+ LOGE("Ion allocation failed");
+ while (i--) {
+ buffer_deallocate(&my_obj->ionBuffer[i]);
+ my_obj->work_buf_cnt--;
+ }
+ return -1;
+ }
+ my_obj->work_buf_cnt++;
+ rc = i;
+ }
+ LOGH("rc %d ", rc);
+ return rc;
+}
+
+/** mm_jpeg_release_workbuffer:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @work_bufs_need: number of work buffers allocated
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Releases the allocated work buffer
+ *
+ **/
+int32_t mm_jpeg_release_workbuffer(mm_jpeg_obj *my_obj,
+ uint32_t work_bufs_need)
+{
+ int32_t rc = 0;
+ uint32_t i;
+ LOGH("release work_bufs %d ", work_bufs_need);
+ for (i = my_obj->work_buf_cnt; i < work_bufs_need; i++) {
+ buffer_deallocate(&my_obj->ionBuffer[i]);
+ }
+ return rc;
+}
+
+/** mm_jpeg_init:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Initializes the jpeg client
+ *
+ **/
+int32_t mm_jpeg_init(mm_jpeg_obj *my_obj)
+{
+ int32_t rc = 0;
+ uint32_t work_buf_size;
+ unsigned int initial_workbufs_cnt = 1;
+
+ /* init locks */
+ pthread_mutex_init(&my_obj->job_lock, NULL);
+
+ /* init ongoing job queue */
+ rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+ if (0 != rc) {
+ LOGE("Error");
+ pthread_mutex_destroy(&my_obj->job_lock);
+ return -1;
+ }
+
+
+ /* init job semaphore and launch jobmgr thread */
+ LOGD("Launch jobmgr thread rc %d", rc);
+ rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+ if (0 != rc) {
+ LOGE("Error");
+ mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+ pthread_mutex_destroy(&my_obj->job_lock);
+ return -1;
+ }
+
+ /* set work buf size from max picture size */
+ if (my_obj->max_pic_w <= 0 || my_obj->max_pic_h <= 0) {
+ LOGE("Width and height are not valid "
+ "dimensions, cannot calc work buf size");
+ mm_jpeg_jobmgr_thread_release(my_obj);
+ mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+ pthread_mutex_destroy(&my_obj->job_lock);
+ return -1;
+ }
+
+ /* allocate work buffer if reproc source buffer is not supposed to be used */
+ if (!my_obj->reuse_reproc_buffer) {
+ work_buf_size = CEILING64((uint32_t)my_obj->max_pic_w) *
+ CEILING64((uint32_t)my_obj->max_pic_h) * 3U / 2U;
+ rc = mm_jpeg_alloc_workbuffer(my_obj, initial_workbufs_cnt, work_buf_size);
+ if (rc == -1) {
+ LOGE("Work buffer allocation failure");
+ return rc;
+ }
+ }
+
+ /* load OMX */
+ if (OMX_ErrorNone != OMX_Init()) {
+ /* roll back in error case */
+ LOGE("OMX_Init failed (%d)", rc);
+ if (!my_obj->reuse_reproc_buffer) {
+ mm_jpeg_release_workbuffer(my_obj, initial_workbufs_cnt);
+ }
+ mm_jpeg_jobmgr_thread_release(my_obj);
+ mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+ pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+#ifdef LOAD_ADSP_RPC_LIB
+ my_obj->adsprpc_lib_handle = dlopen("libadsprpc.so", RTLD_NOW);
+ if (NULL == my_obj->adsprpc_lib_handle) {
+ LOGE("Cannot load the library");
+    /* not returning an error here because even if this loading fails
+       we can go ahead with SW JPEG encoding */
+ }
+#endif
+
+ // create dummy OMX handle to avoid dlopen latency
+ OMX_GetHandle(&my_obj->dummy_handle, mm_jpeg_get_comp_name(), NULL, NULL);
+
+ return rc;
+}
+
+/** mm_jpeg_deinit:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Deinits the jpeg client
+ *
+ **/
+int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj)
+{
+ int32_t rc = 0;
+ uint32_t i = 0;
+
+ /* release jobmgr thread */
+ rc = mm_jpeg_jobmgr_thread_release(my_obj);
+ if (0 != rc) {
+ LOGE("Error");
+ }
+
+ if (my_obj->dummy_handle) {
+ OMX_FreeHandle(my_obj->dummy_handle);
+ }
+
+ /* unload OMX engine */
+ OMX_Deinit();
+
+ /* deinit ongoing job and cb queue */
+ rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+ if (0 != rc) {
+ LOGE("Error");
+ }
+
+ for (i = 0; i < my_obj->work_buf_cnt; i++) {
+ /*Release the ION buffer*/
+ rc = buffer_deallocate(&my_obj->ionBuffer[i]);
+ if (0 != rc) {
+ LOGE("Error releasing ION buffer");
+ }
+ }
+ my_obj->work_buf_cnt = 0;
+ my_obj->jpeg_metadata = NULL;
+
+ /* destroy locks */
+ pthread_mutex_destroy(&my_obj->job_lock);
+
+ return rc;
+}
+
+/** mm_jpeg_new_client:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Create new jpeg client
+ *
+ **/
+uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj)
+{
+ uint32_t client_hdl = 0;
+ uint8_t idx;
+ int i = 0;
+
+ if (my_obj->num_clients >= MAX_JPEG_CLIENT_NUM) {
+ LOGE("num of clients reached limit");
+ return client_hdl;
+ }
+
+ for (idx = 0; idx < MAX_JPEG_CLIENT_NUM; idx++) {
+ if (0 == my_obj->clnt_mgr[idx].is_used) {
+ break;
+ }
+ }
+
+ if (idx < MAX_JPEG_CLIENT_NUM) {
+ /* client session avail */
+ /* generate client handler by index */
+ client_hdl = mm_jpeg_util_generate_handler(idx);
+
+ /* update client session */
+ my_obj->clnt_mgr[idx].is_used = 1;
+ my_obj->clnt_mgr[idx].client_handle = client_hdl;
+
+ pthread_mutex_init(&my_obj->clnt_mgr[idx].lock, NULL);
+ for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+ memset(&my_obj->clnt_mgr[idx].session[i], 0x0, sizeof(mm_jpeg_job_session_t));
+ }
+
+    /* increase client count */
+ my_obj->num_clients++;
+ }
+
+ return client_hdl;
+}
+
+#ifdef LIB2D_ROTATION_ENABLE
+/**
+ * Function: mm_jpeg_lib2d_rotation_cb
+ *
+ * Description: Callback that is called on completion of requested job.
+ *
+ * Input parameters:
+ * userdata - App userdata
+ * jobid - job id that is finished execution
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_jpeg_lib2d_rotation_cb(void *userdata, int jobid)
+{
+ LOGD("Received CB from lib2d\n");
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: mm_jpeg_lib2d_rotation
+ *
+ * Description: lib2d rotation function.
+ *
+ * Input parameters:
+ * p_session - pointer to session
+ * p_node - pointer to job queue node
+ * p_job - pointer to job
+ * p_job_id - pointer to job id
+ *
+ * Return values:
+ * 0 - success
+ * -1 - failure
+ *
+ * Notes: none
+ **/
+int32_t mm_jpeg_lib2d_rotation(mm_jpeg_job_session_t *p_session,
+ mm_jpeg_job_q_node_t* p_node, mm_jpeg_job_t *p_job, uint32_t *p_job_id)
+{
+ lib2d_error lib2d_err = MM_LIB2D_SUCCESS;
+ mm_lib2d_buffer src_buffer;
+ mm_lib2d_buffer dst_buffer;
+ mm_jpeg_buf_t *p_src_main_buf = p_session->params.src_main_buf;
+ mm_jpeg_buf_t *p_src_rot_main_buf = p_session->src_rot_main_buf;
+ mm_jpeg_encode_job_t *p_jobparams = &p_job->encode_job;
+ mm_jpeg_encode_job_t *p_jobparams_node = &p_node->enc_info.encode_job;
+ cam_format_t format;
+ int32_t scanline = 0;
+
+ memset(&src_buffer, 0x0, sizeof(mm_lib2d_buffer));
+ memset(&dst_buffer, 0x0, sizeof(mm_lib2d_buffer));
+
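+  /* Remap the source/destination dimensions and the crop rectangle into
+     the rotated coordinate space for 90/180/270 degree rotations */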
+ switch (p_session->params.rotation) {
+ case 0:
+ break;
+ case 90:
+ p_jobparams_node->main_dim.src_dim.width =
+ p_jobparams->main_dim.src_dim.height;
+ p_jobparams_node->main_dim.src_dim.height =
+ p_jobparams->main_dim.src_dim.width;
+
+ p_jobparams_node->main_dim.dst_dim.width =
+ p_jobparams->main_dim.dst_dim.height;
+ p_jobparams_node->main_dim.dst_dim.height =
+ p_jobparams->main_dim.dst_dim.width;
+
+ p_jobparams_node->main_dim.crop.width =
+ p_jobparams->main_dim.crop.height;
+ p_jobparams_node->main_dim.crop.height =
+ p_jobparams->main_dim.crop.width;
+
+ if (p_jobparams->main_dim.crop.top ||
+ p_jobparams->main_dim.crop.height) {
+ p_jobparams_node->main_dim.crop.left =
+ p_jobparams->main_dim.src_dim.height -
+ (p_jobparams->main_dim.crop.top +
+ p_jobparams->main_dim.crop.height);
+ } else {
+ p_jobparams_node->main_dim.crop.left = 0;
+ }
+ p_jobparams_node->main_dim.crop.top =
+ p_jobparams->main_dim.crop.left;
+ break;
+ case 180:
+ if (p_jobparams->main_dim.crop.left ||
+ p_jobparams->main_dim.crop.width) {
+ p_jobparams_node->main_dim.crop.left =
+ p_jobparams->main_dim.src_dim.width -
+ (p_jobparams->main_dim.crop.left +
+ p_jobparams->main_dim.crop.width);
+ } else {
+ p_jobparams_node->main_dim.crop.left = 0;
+ }
+
+ if (p_jobparams->main_dim.crop.top ||
+ p_jobparams->main_dim.crop.height) {
+ p_jobparams_node->main_dim.crop.top =
+ p_jobparams->main_dim.src_dim.height -
+ (p_jobparams->main_dim.crop.top +
+ p_jobparams->main_dim.crop.height);
+ } else {
+ p_jobparams_node->main_dim.crop.top = 0;
+ }
+ break;
+ case 270:
+ p_jobparams_node->main_dim.src_dim.width =
+ p_jobparams->main_dim.src_dim.height;
+ p_jobparams_node->main_dim.src_dim.height =
+ p_jobparams->main_dim.src_dim.width;
+
+ p_jobparams_node->main_dim.dst_dim.width =
+ p_jobparams->main_dim.dst_dim.height;
+ p_jobparams_node->main_dim.dst_dim.height =
+ p_jobparams->main_dim.dst_dim.width;
+
+ p_jobparams_node->main_dim.crop.width =
+ p_jobparams->main_dim.crop.height;
+ p_jobparams_node->main_dim.crop.height =
+ p_jobparams->main_dim.crop.width;
+ p_jobparams_node->main_dim.crop.left =
+ p_jobparams->main_dim.crop.top;
+ if (p_jobparams->main_dim.crop.left ||
+ p_jobparams->main_dim.crop.width) {
+ p_jobparams_node->main_dim.crop.top =
+ p_jobparams->main_dim.src_dim.width -
+ (p_jobparams->main_dim.crop.left +
+ p_jobparams->main_dim.crop.width);
+ } else {
+ p_jobparams_node->main_dim.crop.top = 0;
+ }
+ break;
+ }
+
+ LOGD("crop wxh %dx%d txl %dx%d",
+ p_jobparams_node->main_dim.crop.width,
+ p_jobparams_node->main_dim.crop.height,
+ p_jobparams_node->main_dim.crop.top,
+ p_jobparams_node->main_dim.crop.left);
+
+ format = mm_jpeg_get_imgfmt_from_colorfmt(p_session->params.color_format);
+ src_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+ src_buffer.yuv_buffer.fd =
+ p_src_main_buf[p_jobparams->src_index].fd;
+ src_buffer.yuv_buffer.format = format;
+ src_buffer.yuv_buffer.width = p_jobparams->main_dim.src_dim.width;
+ src_buffer.yuv_buffer.height = p_jobparams->main_dim.src_dim.height;
+ src_buffer.yuv_buffer.plane0 =
+ p_src_main_buf[p_jobparams->src_index].buf_vaddr;
+ src_buffer.yuv_buffer.stride0 =
+ p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+ scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+ src_buffer.yuv_buffer.plane1 =
+ (uint8_t*)src_buffer.yuv_buffer.plane0 +
+ (src_buffer.yuv_buffer.stride0 * scanline);
+ src_buffer.yuv_buffer.stride1 = src_buffer.yuv_buffer.stride0;
+
+ LOGD(" lib2d SRC wxh = %dx%d , stxsl = %dx%d\n",
+ src_buffer.yuv_buffer.width, src_buffer.yuv_buffer.height,
+ src_buffer.yuv_buffer.stride0, scanline);
+
+ dst_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+ dst_buffer.yuv_buffer.fd =
+ p_src_rot_main_buf[p_jobparams->src_index].fd;
+ dst_buffer.yuv_buffer.format = format;
+ dst_buffer.yuv_buffer.width = p_jobparams_node->main_dim.src_dim.width;
+ dst_buffer.yuv_buffer.height = p_jobparams_node->main_dim.src_dim.height;
+ dst_buffer.yuv_buffer.plane0 =
+ p_src_rot_main_buf[p_jobparams->src_index].buf_vaddr;
+
+ if ((p_session->params.rotation == 90) ||
+ (p_session->params.rotation == 270)) {
+ dst_buffer.yuv_buffer.stride0 =
+ p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+ scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+ } else {
+ dst_buffer.yuv_buffer.stride0 =
+ p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+ scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+ }
+
+ dst_buffer.yuv_buffer.plane1 =
+ (uint8_t*) dst_buffer.yuv_buffer.plane0 +
+ (dst_buffer.yuv_buffer.stride0 * scanline);
+ dst_buffer.yuv_buffer.stride1 = dst_buffer.yuv_buffer.stride0;
+
+ LOGD(" lib2d DEST wxh = %dx%d , stxsl = %dx%d\n",
+ dst_buffer.yuv_buffer.width, dst_buffer.yuv_buffer.height,
+ dst_buffer.yuv_buffer.stride0, scanline);
+
+ LOGD(" lib2d rotation = %d\n", p_session->params.rotation);
+
+ lib2d_err = mm_lib2d_start_job(p_session->lib2d_handle, &src_buffer, &dst_buffer,
+ *p_job_id, NULL, mm_jpeg_lib2d_rotation_cb, p_session->params.rotation);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ LOGE("Error in mm_lib2d_start_job \n");
+ return -1;
+ }
+
+ buffer_clean(&p_session->src_rot_ion_buffer[p_jobparams->src_index]);
+
+ return 0;
+}
+#endif
+
+/** mm_jpeg_start_job:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @job: pointer to encode job
+ *    @job_id: pointer to the returned job id
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_t *job,
+ uint32_t *job_id)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = -1;
+ uint8_t session_idx = 0;
+ uint8_t client_idx = 0;
+ mm_jpeg_job_q_node_t* node = NULL;
+ mm_jpeg_job_session_t *p_session = NULL;
+ mm_jpeg_encode_job_t *p_jobparams = NULL;
+ uint32_t work_bufs_need;
+ uint32_t work_buf_size;
+
+ *job_id = 0;
+
+ if (!job) {
+ LOGE("invalid job !!!");
+ return rc;
+ }
+ p_jobparams = &job->encode_job;
+
+ /* check if valid session */
+ session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+ client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+ LOGD("session_idx %d client idx %d",
+ session_idx, client_idx);
+
+ if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+ (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+ LOGE("invalid session id %x",
+ job->encode_job.session_id);
+ return rc;
+ }
+
+ p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+
+ if (my_obj->reuse_reproc_buffer) {
+ p_session->work_buffer.addr = p_jobparams->work_buf.buf_vaddr;
+ p_session->work_buffer.size = p_jobparams->work_buf.buf_size;
+ p_session->work_buffer.ion_info_fd.fd = p_jobparams->work_buf.fd;
+ p_session->work_buffer.p_pmem_fd = p_jobparams->work_buf.fd;
+
+ work_bufs_need = my_obj->num_sessions + 1;
+ if (work_bufs_need > MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+ work_bufs_need = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+ }
+
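+    /* Prefer the work buffer handed in by the HAL with this job; otherwise
+       fall back to an internally allocated ION buffer */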
+ if (p_session->work_buffer.addr) {
+ work_bufs_need--;
+ LOGD("HAL passed the work buffer of size = %d; don't alloc internally",
+ p_session->work_buffer.size);
+ } else {
+ p_session->work_buffer = my_obj->ionBuffer[0];
+ }
+
+ LOGD(">>>> Work bufs need %d, %d",
+ work_bufs_need, my_obj->work_buf_cnt);
+ if (work_bufs_need) {
+ work_buf_size = CEILING64(my_obj->max_pic_w) *
+ CEILING64(my_obj->max_pic_h) * 3 / 2;
+ rc = mm_jpeg_alloc_workbuffer(my_obj, work_bufs_need, work_buf_size);
+ if (rc == -1) {
+ LOGE("Work buffer allocation failure");
+ return rc;
+ } else {
+ p_session->work_buffer = my_obj->ionBuffer[rc];
+ }
+ }
+ }
+
+ if (OMX_FALSE == p_session->active) {
+ LOGE("session not active %x",
+ job->encode_job.session_id);
+ return rc;
+ }
+
+ if ((p_jobparams->src_index >= (int32_t)p_session->params.num_src_bufs) ||
+ (p_jobparams->dst_index >= (int32_t)p_session->params.num_dst_bufs)) {
+ LOGE("invalid buffer indices");
+ return rc;
+ }
+
+ /* enqueue new job into todo job queue */
+ node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_jpeg_job_q_node_t");
+ return -1;
+ }
+
+ KPI_ATRACE_INT("Camera:JPEG",
+ (int32_t)((uint32_t)session_idx<<16 | ++p_session->job_index));
+
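+  /* Compose the job id from the session id plus a rolling job history
+     counter shifted into the bits above it */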
+ *job_id = job->encode_job.session_id |
+ (((uint32_t)p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+ memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+ node->enc_info.encode_job = job->encode_job;
+
+#ifdef LIB2D_ROTATION_ENABLE
+ if (p_session->lib2d_rotation_flag) {
+ rc = mm_jpeg_lib2d_rotation(p_session, node, job, job_id);
+ if (rc < 0) {
+ LOGE("Lib2d rotation failed");
+ return rc;
+ }
+ }
+#endif
+
+ if (p_session->thumb_from_main) {
+ node->enc_info.encode_job.thumb_dim.src_dim =
+ node->enc_info.encode_job.main_dim.src_dim;
+ node->enc_info.encode_job.thumb_dim.crop =
+ node->enc_info.encode_job.main_dim.crop;
+ if (p_session->lib2d_rotation_flag) {
+ if ((p_session->params.rotation == 90) ||
+ (p_session->params.rotation == 270)) {
+ node->enc_info.encode_job.thumb_dim.dst_dim.width =
+ job->encode_job.thumb_dim.dst_dim.height;
+ node->enc_info.encode_job.thumb_dim.dst_dim.height =
+ job->encode_job.thumb_dim.dst_dim.width;
+ }
+ }
+ }
+ node->enc_info.job_id = *job_id;
+ node->enc_info.client_handle = p_session->client_hdl;
+ node->type = MM_JPEG_CMD_TYPE_JOB;
+
+ qdata.p = node;
+ rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+ if (0 == rc) {
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+ }
+
+ LOGH("session_idx %u client_idx %u job_id %d X",
+ session_idx, client_idx, *job_id);
+
+ return rc;
+}
+
+
+
+/** mm_jpeg_abort_job:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @jobId: job id
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Abort the encoding job
+ *
+ **/
+int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+ uint32_t jobId)
+{
+ int32_t rc = -1;
+ mm_jpeg_job_q_node_t *node = NULL;
+ mm_jpeg_job_session_t *p_session = NULL;
+
+ pthread_mutex_lock(&my_obj->job_lock);
+
+ /* abort job if in todo queue */
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+ if (NULL != node) {
+ free(node);
+ goto abort_done;
+ }
+
+ /* abort job if in ongoing queue */
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+ if (NULL != node) {
+ /* find job that is OMX ongoing, ask OMX to abort the job */
+ p_session = mm_jpeg_get_session(my_obj, node->enc_info.job_id);
+ if (p_session) {
+ mm_jpeg_session_abort(p_session);
+ } else {
+ LOGE("Invalid job id 0x%x",
+ node->enc_info.job_id);
+ }
+ free(node);
+ goto abort_done;
+ }
+
+abort_done:
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ return rc;
+}
+
+
+#ifdef MM_JPEG_READ_META_KEYFILE
+static int32_t mm_jpeg_read_meta_keyfile(mm_jpeg_job_session_t *p_session,
+ const char *filename)
+{
+ int rc = 0;
+ FILE *fp = NULL;
+ size_t file_size = 0;
+ fp = fopen(filename, "r");
+ if (!fp) {
+ LOGE("Key not present");
+ return -1;
+ }
+ fseek(fp, 0, SEEK_END);
+ file_size = (size_t)ftell(fp);
+ fseek(fp, 0, SEEK_SET);
+
+ p_session->meta_enc_key = (uint8_t *) malloc((file_size + 1) * sizeof(uint8_t));
+
+ if (!p_session->meta_enc_key) {
+ LOGE("error");
+ return -1;
+ }
+
+ fread(p_session->meta_enc_key, 1, file_size, fp);
+ fclose(fp);
+
+ p_session->meta_enc_keylen = file_size;
+
+ return rc;
+}
+#endif // MM_JPEG_READ_META_KEYFILE
+
+/** mm_jpeg_create_session:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @client_hdl: client handle
+ * @p_params: pointer to encode params
+ * @p_session_id: session id
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Start the encoding session
+ *
+ **/
+int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ mm_jpeg_encode_params_t *p_params,
+ uint32_t* p_session_id)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = 0;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ uint8_t clnt_idx = 0;
+ int session_idx = -1;
+ mm_jpeg_job_session_t *p_session = NULL;
+ mm_jpeg_job_session_t * p_prev_session = NULL;
+ *p_session_id = 0;
+ uint32_t i = 0;
+ uint32_t j = 0;
+ uint32_t num_omx_sessions = 1;
+ uint32_t work_buf_size;
+ mm_jpeg_queue_t *p_session_handle_q, *p_out_buf_q;
+ uint32_t work_bufs_need;
+ char trace_tag[32];
+
+ /* validate the parameters */
+ if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+ || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+ LOGE("invalid num buffers");
+ return -1;
+ }
+
+ /* check if valid client */
+ clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+ if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+ LOGE("invalid client with handler (%d)", client_hdl);
+ return -1;
+ }
+
+ if (p_params->burst_mode) {
+ num_omx_sessions = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+ }
+
+ if (!my_obj->reuse_reproc_buffer) {
+ work_bufs_need = num_omx_sessions;
+ if (work_bufs_need > MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+ work_bufs_need = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+ }
+ LOGD(">>>> Work bufs need %d", work_bufs_need);
+ work_buf_size = CEILING64(my_obj->max_pic_w) *
+ CEILING64(my_obj->max_pic_h) * 3 / 2;
+ rc = mm_jpeg_alloc_workbuffer(my_obj, work_bufs_need, work_buf_size);
+ if (rc == -1) {
+ LOGE("Work buffer allocation failure");
+ return rc;
+ }
+ }
+
+
+ /* init omx handle queue */
+ p_session_handle_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_session_handle_q));
+ if (NULL == p_session_handle_q) {
+ LOGE("Error");
+ goto error1;
+ }
+ rc = mm_jpeg_queue_init(p_session_handle_q);
+ if (0 != rc) {
+ LOGE("Error");
+ free(p_session_handle_q);
+ goto error1;
+ }
+
+ /* init output buf queue */
+ p_out_buf_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_out_buf_q));
+  if (NULL == p_out_buf_q) {
+    LOGE("Error: Cannot allocate memory\n");
+    mm_jpeg_queue_deinit(p_session_handle_q);
+    free(p_session_handle_q);
+    return -1;
+  }
+
+ /* init omx handle queue */
+ rc = mm_jpeg_queue_init(p_out_buf_q);
+ if (0 != rc) {
+ LOGE("Error");
+ free(p_out_buf_q);
+ goto error1;
+ }
+
+ for (i = 0; i < num_omx_sessions; i++) {
+ uint32_t buf_idx = 0U;
+ session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+ if (session_idx < 0 || NULL == p_session) {
+ LOGE("invalid session id (%d)", session_idx);
+ goto error2;
+ }
+
+ snprintf(trace_tag, sizeof(trace_tag), "Camera:JPEGsession%d", session_idx);
+ ATRACE_INT(trace_tag, 1);
+
+ p_session->job_index = 0;
+
+ p_session->next_session = NULL;
+
+ if (p_prev_session) {
+ p_prev_session->next_session = p_session;
+ }
+ p_prev_session = p_session;
+
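+    /* Each OMX session in a burst gets its own preallocated work buffer,
+       indexed by the session's position in the burst */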
+ buf_idx = i;
+ if (buf_idx < MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+ p_session->work_buffer = my_obj->ionBuffer[buf_idx];
+ } else {
+      LOGE("Invalid index, setting buffer addr to null");
+ p_session->work_buffer.addr = NULL;
+ p_session->work_buffer.ion_fd = -1;
+ p_session->work_buffer.p_pmem_fd = -1;
+ }
+
+ p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+
+ /*copy the params*/
+ p_session->params = *p_params;
+ ret = mm_jpeg_session_create(p_session);
+ if (OMX_ErrorNone != ret) {
+ p_session->active = OMX_FALSE;
+ LOGE("jpeg session create failed");
+ goto error2;
+ }
+
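+    /* Compose the session id: magic value in the top byte, session index
+       in bits 8..15, client index in the low byte */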
+ uint32_t session_id = (JOB_ID_MAGICVAL << 24) |
+ ((uint32_t)session_idx << 8) | clnt_idx;
+
+ if (!*p_session_id) {
+ *p_session_id = session_id;
+ }
+
+ if (p_session->thumb_from_main) {
+ memcpy(p_session->params.src_thumb_buf, p_session->params.src_main_buf,
+ sizeof(p_session->params.src_thumb_buf));
+ p_session->params.num_tmb_bufs = p_session->params.num_src_bufs;
+ if (!p_session->params.encode_thumbnail) {
+ p_session->params.num_tmb_bufs = 0;
+ }
+ p_session->params.thumb_dim.src_dim = p_session->params.main_dim.src_dim;
+ p_session->params.thumb_dim.crop = p_session->params.main_dim.crop;
+ }
+#ifdef LIB2D_ROTATION_ENABLE
+ if (p_session->params.rotation) {
+ LOGD("Enable lib2d rotation");
+ p_session->lib2d_rotation_flag = 1;
+
+ cam_format_t lib2d_format;
+ lib2d_error lib2d_err = MM_LIB2D_SUCCESS;
+ lib2d_format = mm_jpeg_get_imgfmt_from_colorfmt(p_session->params.color_format);
+ lib2d_err = mm_lib2d_init(MM_LIB2D_SYNC_MODE, lib2d_format,
+ lib2d_format, &p_session->lib2d_handle);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ LOGE("lib2d init for rotation failed\n");
+ rc = -1;
+ p_session->lib2d_rotation_flag = 0;
+ goto error2;
+ }
+ } else {
+ LOGD("Disable lib2d rotation");
+ p_session->lib2d_rotation_flag = 0;
+ }
+#else
+ p_session->lib2d_rotation_flag = 0;
+#endif
+
+ if (p_session->lib2d_rotation_flag) {
+ p_session->num_src_rot_bufs = p_session->params.num_src_bufs;
+ memset(p_session->src_rot_main_buf, 0,
+ sizeof(p_session->src_rot_main_buf));
+
+ for (j = 0; j < p_session->num_src_rot_bufs; j++) {
+ p_session->src_rot_main_buf[j].buf_size =
+ p_session->params.src_main_buf[j].buf_size;
+ p_session->src_rot_main_buf[j].format =
+ p_session->params.src_main_buf[j].format;
+ p_session->src_rot_main_buf[j].index = j;
+
+ memset(&p_session->src_rot_ion_buffer[j], 0, sizeof(buffer_t));
+ p_session->src_rot_ion_buffer[j].size =
+ p_session->src_rot_main_buf[j].buf_size;
+ p_session->src_rot_ion_buffer[j].addr =
+ (uint8_t *)buffer_allocate(&p_session->src_rot_ion_buffer[j], 1);
+
+ if (NULL == p_session->src_rot_ion_buffer[j].addr) {
+ LOGE("Ion buff alloc for rotation failed");
+ // deallocate all previously allocated rotation ion buffs
+ for (j = 0; j < p_session->num_src_rot_bufs; j++) {
+ if (p_session->src_rot_ion_buffer[j].addr) {
+ buffer_deallocate(&p_session->src_rot_ion_buffer[j]);
+ }
+ }
+ //fall back to SW encoding for rotation
+ p_session->lib2d_rotation_flag = 0;
+ } else {
+ p_session->src_rot_main_buf[j].buf_vaddr =
+ p_session->src_rot_ion_buffer[j].addr;
+ p_session->src_rot_main_buf[j].fd =
+ p_session->src_rot_ion_buffer[j].p_pmem_fd;
+ }
+ }
+ }
+
+ p_session->client_hdl = client_hdl;
+ p_session->sessionId = session_id;
+ p_session->session_handle_q = p_session_handle_q;
+ p_session->out_buf_q = p_out_buf_q;
+
+ qdata.p = p_session;
+ mm_jpeg_queue_enq(p_session_handle_q, qdata);
+
+ p_session->meta_enc_key = NULL;
+ p_session->meta_enc_keylen = 0;
+
+#ifdef MM_JPEG_READ_META_KEYFILE
+ mm_jpeg_read_meta_keyfile(p_session, META_KEYFILE);
+#endif
+
+ pthread_mutex_lock(&my_obj->job_lock);
+ /* Configure session if not already configured and if
+ no other session configured*/
+ if ((OMX_FALSE == p_session->config) &&
+ (my_obj->p_session_inprogress == NULL)) {
+ rc = mm_jpeg_session_configure(p_session);
+ if (rc) {
+ LOGE("Error");
+ pthread_mutex_unlock(&my_obj->job_lock);
+ goto error2;
+ }
+ p_session->config = OMX_TRUE;
+ my_obj->p_session_inprogress = p_session;
+ }
+ pthread_mutex_unlock(&my_obj->job_lock);
+ p_session->num_omx_sessions = num_omx_sessions;
+
+ LOGH("session id %x thumb_from_main %d",
+ session_id, p_session->thumb_from_main);
+ }
+
+ // Queue the output buf indexes
+ for (i = 0; i < p_params->num_dst_bufs; i++) {
+ qdata.u32 = i + 1;
+ mm_jpeg_queue_enq(p_out_buf_q, qdata);
+ }
+
+ return rc;
+
+error1:
+ rc = -1;
+error2:
+ if (NULL != p_session) {
+ ATRACE_INT(trace_tag, 0);
+ }
+ return rc;
+}
+
+/** mm_jpegenc_destroy_job
+ *
+ * Arguments:
+ * @p_session: Session obj
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Destroy the job-specific parameters
+ *
+ **/
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+ mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+ int i = 0, rc = 0;
+
+ LOGD("Exif entry count %d %d",
+ (int)p_jobparams->exif_info.numOfEntries,
+ (int)p_session->exif_count_local);
+ for (i = 0; i < p_session->exif_count_local; i++) {
+ rc = releaseExifEntry(&p_session->exif_info_local[i]);
+ if (rc) {
+ LOGE("Exif release failed (%d)", rc);
+ }
+ }
+ p_session->exif_count_local = 0;
+
+ return rc;
+}
+
+/** mm_jpegenc_job_done:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Clean up a finished encode job and recycle the session
+ *
+ **/
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session)
+{
+ mm_jpeg_q_data_t qdata;
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+ mm_jpeg_job_q_node_t *node = NULL;
+
+ /*Destroy job related params*/
+ mm_jpegenc_destroy_job(p_session);
+
+ /*remove the job*/
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+ p_session->jobId);
+ if (node) {
+ free(node);
+ }
+ p_session->encoding = OMX_FALSE;
+
+ // Queue to available sessions
+ qdata.p = p_session;
+ mm_jpeg_queue_enq(p_session->session_handle_q, qdata);
+
+ if (p_session->auto_out_buf) {
+ //Queue out buf index
+ qdata.u32 = (uint32_t)(p_session->encode_job.dst_index + 1);
+ mm_jpeg_queue_enq(p_session->out_buf_q, qdata);
+ }
+
+ /* wake up jobMgr thread to work on new job if there is any */
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+/** mm_jpeg_destroy_session:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @p_session: session object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Destroy the encoding session
+ *
+ **/
+int32_t mm_jpeg_destroy_session(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_session_t *p_session)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = 0;
+ mm_jpeg_job_q_node_t *node = NULL;
+ uint32_t session_id = 0;
+ mm_jpeg_job_session_t *p_cur_sess;
+ char trace_tag[32];
+
+ if (NULL == p_session) {
+ LOGE("invalid session");
+ return rc;
+ }
+
+ session_id = p_session->sessionId;
+
+ pthread_mutex_lock(&my_obj->job_lock);
+
+ /* abort job if in todo queue */
+ LOGD("abort todo jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ }
+
+ /* abort job if in ongoing queue */
+ LOGD("abort ongoing jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ }
+
+ /* abort the current session */
+ mm_jpeg_session_abort(p_session);
+
+#ifdef LIB2D_ROTATION_ENABLE
+ lib2d_error lib2d_err = MM_LIB2D_SUCCESS;
+ if (p_session->lib2d_rotation_flag) {
+ lib2d_err = mm_lib2d_deinit(p_session->lib2d_handle);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ LOGE("Error in mm_lib2d_deinit \n");
+ }
+ }
+#endif
+
+ mm_jpeg_session_destroy(p_session);
+
+ p_cur_sess = p_session;
+
+ do {
+ mm_jpeg_remove_session_idx(my_obj, p_cur_sess->sessionId);
+ } while (NULL != (p_cur_sess = p_cur_sess->next_session));
+
+
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ while (1) {
+ qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+ if (NULL == qdata.p)
+ break;
+ }
+ mm_jpeg_queue_deinit(p_session->session_handle_q);
+ free(p_session->session_handle_q);
+ p_session->session_handle_q = NULL;
+
+ while (1) {
+ qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+ if (0U == qdata.u32)
+ break;
+ }
+ mm_jpeg_queue_deinit(p_session->out_buf_q);
+ free(p_session->out_buf_q);
+ p_session->out_buf_q = NULL;
+
+
+ /* wake up jobMgr thread to work on new job if there is any */
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+
+ snprintf(trace_tag, sizeof(trace_tag), "Camera:JPEGsession%d", GET_SESSION_IDX(session_id));
+ ATRACE_INT(trace_tag, 0);
+
+ LOGH("destroy session successful. X");
+
+ return rc;
+}
+
+
+
+
+/** mm_jpeg_destroy_session_unlocked:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @p_session: session object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Destroy the encoding session without taking the job lock
+ *
+ **/
+int32_t mm_jpeg_destroy_session_unlocked(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_session_t *p_session)
+{
+ int32_t rc = -1;
+ mm_jpeg_job_q_node_t *node = NULL;
+ uint32_t session_id = 0;
+ if (NULL == p_session) {
+ LOGE("invalid session");
+ return rc;
+ }
+
+ session_id = p_session->sessionId;
+
+ /* abort job if in todo queue */
+ LOGD("abort todo jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ }
+
+ /* abort job if in ongoing queue */
+ LOGD("abort ongoing jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ }
+
+ /* abort the current session */
+ mm_jpeg_session_abort(p_session);
+ //mm_jpeg_remove_session_idx(my_obj, session_id);
+
+ return rc;
+}
+
+/** mm_jpeg_destroy_session_by_id:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @session_id: session index
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Destroy the encoding session
+ *
+ **/
+int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+ mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+ return mm_jpeg_destroy_session(my_obj, p_session);
+}
+
+
+
+/** mm_jpeg_close:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @client_hdl: client handle
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ * Close the jpeg client
+ *
+ **/
+int32_t mm_jpeg_close(mm_jpeg_obj *my_obj, uint32_t client_hdl)
+{
+ int32_t rc = -1;
+ uint8_t clnt_idx = 0;
+ int i = 0;
+
+ /* check if valid client */
+ clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+ if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+ LOGE("invalid client with handler (%d)", client_hdl);
+ return rc;
+ }
+
+ LOGD("E");
+
+ /* abort all jobs from the client */
+ pthread_mutex_lock(&my_obj->job_lock);
+
+ for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+ if (OMX_TRUE == my_obj->clnt_mgr[clnt_idx].session[i].active)
+ mm_jpeg_destroy_session_unlocked(my_obj,
+ &my_obj->clnt_mgr[clnt_idx].session[i]);
+ }
+
+#ifdef LOAD_ADSP_RPC_LIB
+ if (NULL != my_obj->adsprpc_lib_handle) {
+ dlclose(my_obj->adsprpc_lib_handle);
+ my_obj->adsprpc_lib_handle = NULL;
+ }
+#endif
+
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ /* invalidate client session */
+ pthread_mutex_destroy(&my_obj->clnt_mgr[clnt_idx].lock);
+ memset(&my_obj->clnt_mgr[clnt_idx], 0, sizeof(mm_jpeg_client_t));
+
+ rc = 0;
+ LOGD("X");
+ return rc;
+}
+
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE *pBuffer)
+{
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+ LOGH("count %d ", p_session->ebd_count);
+ pthread_mutex_lock(&p_session->lock);
+ p_session->ebd_count++;
+ pthread_mutex_unlock(&p_session->lock);
+ return 0;
+}
+
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE *pBuffer)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+ mm_jpeg_output_t output_buf;
+ LOGI("count %d ", p_session->fbd_count);
+  LOGI("[KPI Perf] : PROFILE_JPEG_FBD");
+
+ pthread_mutex_lock(&p_session->lock);
+ KPI_ATRACE_INT("Camera:JPEG",
+ (int32_t)((uint32_t)GET_SESSION_IDX(
+ p_session->sessionId)<<16 | --p_session->job_index));
+ if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+ pthread_mutex_unlock(&p_session->lock);
+ return ret;
+ }
+#ifdef MM_JPEG_DUMP_OUT_BS
+ char filename[256];
+ static int bsc;
+ snprintf(filename, sizeof(filename),
+ QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_bs%d.jpg", bsc++);
+ DUMP_TO_FILE(filename,
+ pBuffer->pBuffer,
+ (size_t)(uint32_t)pBuffer->nFilledLen);
+#endif
+
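+  /* Hand the encoded bitstream to the client callback, then recycle the
+     session and its output buffer index */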
+ p_session->fbd_count++;
+ if (NULL != p_session->params.jpeg_cb) {
+
+ p_session->job_status = JPEG_JOB_STATUS_DONE;
+ output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+ output_buf.buf_vaddr = pBuffer->pBuffer;
+ output_buf.fd = -1;
+ LOGH("send jpeg callback %d buf 0x%p len %u JobID %u",
+ p_session->job_status, pBuffer->pBuffer,
+ (unsigned int)pBuffer->nFilledLen, p_session->jobId);
+ p_session->params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ &output_buf,
+ p_session->params.userdata);
+
+ mm_jpegenc_job_done(p_session);
+
+ mm_jpeg_put_mem((void *)p_session);
+ }
+ pthread_mutex_unlock(&p_session->lock);
+
+ return ret;
+}
+
+
+
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_EVENTTYPE eEvent,
+ OMX_U32 nData1,
+ OMX_U32 nData2,
+ OMX_PTR pEventData)
+{
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+ LOGD("%d %d %d state %d", eEvent, (int)nData1,
+ (int)nData2, p_session->abort_state);
+
+ pthread_mutex_lock(&p_session->lock);
+
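+  /* If an abort is pending, acknowledge it and signal the waiting thread
+     so the abort can complete */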
+ if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+ p_session->abort_state = MM_JPEG_ABORT_DONE;
+ pthread_cond_signal(&p_session->cond);
+ pthread_mutex_unlock(&p_session->lock);
+ return OMX_ErrorNone;
+ }
+
+ if (eEvent == OMX_EventError) {
+ p_session->error_flag = nData2;
+ if (p_session->encoding == OMX_TRUE) {
+ LOGE("Error during encoding");
+
+ /* send jpeg callback */
+ if (NULL != p_session->params.jpeg_cb) {
+ p_session->job_status = JPEG_JOB_STATUS_ERROR;
+ LOGE("send jpeg error callback %d",
+ p_session->job_status);
+ p_session->params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ NULL,
+ p_session->params.userdata);
+ }
+
+ /* remove from ready queue */
+ mm_jpegenc_job_done(p_session);
+ }
+ pthread_cond_signal(&p_session->cond);
+ } else if (eEvent == OMX_EventCmdComplete) {
+ if (p_session->state_change_pending == OMX_TRUE) {
+ p_session->state_change_pending = OMX_FALSE;
+ pthread_cond_signal(&p_session->cond);
+ }
+ }
+
+ pthread_mutex_unlock(&p_session->lock);
+ return OMX_ErrorNone;
+}
+
+
+
+/* remove the first job from the queue with matching client handle */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+ mm_jpeg_queue_t* queue, uint32_t client_hdl)
+{
+ mm_jpeg_q_node_t* node = NULL;
+ mm_jpeg_job_q_node_t* data = NULL;
+ mm_jpeg_job_q_node_t* job_node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ while(pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+ if (data && (data->enc_info.client_handle == client_hdl)) {
+ LOGH("found matching client handle");
+ job_node = data;
+ cam_list_del_node(&node->list);
+ queue->size--;
+ free(node);
+ LOGH("queue size = %d", queue->size);
+ break;
+ }
+ pos = pos->next;
+ }
+
+ pthread_mutex_unlock(&queue->lock);
+
+ return job_node;
+}
+
+/* remove the first job from the queue with matching session id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+ mm_jpeg_queue_t* queue, uint32_t session_id)
+{
+ mm_jpeg_q_node_t* node = NULL;
+ mm_jpeg_job_q_node_t* data = NULL;
+ mm_jpeg_job_q_node_t* job_node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ while(pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+ if (data && (data->enc_info.encode_job.session_id == session_id)) {
+ LOGH("found matching session id");
+ job_node = data;
+ cam_list_del_node(&node->list);
+ queue->size--;
+ free(node);
+ LOGH("queue size = %d", queue->size);
+ break;
+ }
+ pos = pos->next;
+ }
+
+ pthread_mutex_unlock(&queue->lock);
+
+ return job_node;
+}
+
+/* remove job from the queue with matching job id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+ mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+ mm_jpeg_q_node_t* node = NULL;
+ mm_jpeg_job_q_node_t* data = NULL;
+ mm_jpeg_job_q_node_t* job_node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ uint32_t lq_job_id;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ while(pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+ if(NULL == data) {
+ LOGE("Data is NULL");
+ pthread_mutex_unlock(&queue->lock);
+ return NULL;
+ }
+
+ if (data->type == MM_JPEG_CMD_TYPE_DECODE_JOB) {
+ lq_job_id = data->dec_info.job_id;
+ } else {
+ lq_job_id = data->enc_info.job_id;
+ }
+
+ if (data && (lq_job_id == job_id)) {
+ LOGD("found matching job id");
+ job_node = data;
+ cam_list_del_node(&node->list);
+ queue->size--;
+ free(node);
+ break;
+ }
+ pos = pos->next;
+ }
+
+ pthread_mutex_unlock(&queue->lock);
+
+ return job_node;
+}
+
+/* remove job from the queue with matching job id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+ mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+ mm_jpeg_q_node_t* node = NULL;
+ mm_jpeg_job_q_node_t* data = NULL;
+ mm_jpeg_job_q_node_t* job_node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ head = &queue->head.list;
+ pos = head->next;
+ while(pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+ if (data && (data->enc_info.job_id == job_id)) {
+ job_node = data;
+ cam_list_del_node(&node->list);
+ queue->size--;
+ free(node);
+ break;
+ }
+ pos = pos->next;
+ }
+
+ return job_node;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
new file mode 100644
index 0000000..e56fc24
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -0,0 +1,652 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <string.h>
+#include <math.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+
+#define LOWER(a) ((a) & 0xFFFF)
+#define UPPER(a) (((a)>>16) & 0xFFFF)
+#define CHANGE_ENDIAN_16(a) ((0x00FF & ((a)>>8)) | (0xFF00 & ((a)<<8)))
+#define ROUND(a) \
+ ((a >= 0) ? (uint32_t)(a + 0.5) : (uint32_t)(a - 0.5))
+
+
+/** addExifEntry:
+ *
+ * Arguments:
+ * @exif_info : Exif info struct
+ * @p_session: job session
+ * @tagid : exif tag ID
+ * @type : data type
+ *  @count      : number of elements of the given type
+ * @data : input data ptr
+ *
+ *  Return     : int32_t type of status
+ *              0  -- success
+ *              non-zero failure code
+ *
+ * Description:
+ * Function to add an entry to exif data
+ *
+ **/
+int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+ exif_tag_type_t type, uint32_t count, void *data)
+{
+ int32_t rc = 0;
+ uint32_t numOfEntries = (uint32_t)p_exif_info->numOfEntries;
+ QEXIF_INFO_DATA *p_info_data = p_exif_info->exif_data;
+ if(numOfEntries >= MAX_EXIF_TABLE_ENTRIES) {
+ LOGE("Number of entries exceeded limit");
+ return -1;
+ }
+
+ p_info_data[numOfEntries].tag_id = tagid;
+ p_info_data[numOfEntries].tag_entry.type = type;
+ p_info_data[numOfEntries].tag_entry.count = count;
+ p_info_data[numOfEntries].tag_entry.copy = 1;
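+  /* Array-valued entries (and ASCII/undefined data) are deep-copied into
+     heap memory that releaseExifEntry() frees later; scalar values are
+     stored inline in the tag entry union */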
+ switch (type) {
+ case EXIF_BYTE: {
+ if (count > 1) {
+ uint8_t *values = (uint8_t *)malloc(count);
+ if (values == NULL) {
+ LOGE("No memory for byte array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count);
+ p_info_data[numOfEntries].tag_entry.data._bytes = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._byte = *(uint8_t *)data;
+ }
+ }
+ break;
+ case EXIF_ASCII: {
+ char *str = NULL;
+ str = (char *)malloc(count + 1);
+ if (str == NULL) {
+ LOGE("No memory for ascii string");
+ rc = -1;
+ } else {
+ memset(str, 0, count + 1);
+ memcpy(str, data, count);
+ p_info_data[numOfEntries].tag_entry.data._ascii = str;
+ }
+ }
+ break;
+ case EXIF_SHORT: {
+ if (count > 1) {
+ uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+ if (values == NULL) {
+ LOGE("No memory for short array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count * sizeof(uint16_t));
+ p_info_data[numOfEntries].tag_entry.data._shorts = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._short = *(uint16_t *)data;
+ }
+ }
+ break;
+ case EXIF_LONG: {
+ if (count > 1) {
+ uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+ if (values == NULL) {
+ LOGE("No memory for long array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count * sizeof(uint32_t));
+ p_info_data[numOfEntries].tag_entry.data._longs = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._long = *(uint32_t *)data;
+ }
+ }
+ break;
+ case EXIF_RATIONAL: {
+ if (count > 1) {
+ rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+ if (values == NULL) {
+ LOGE("No memory for rational array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count * sizeof(rat_t));
+ p_info_data[numOfEntries].tag_entry.data._rats = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._rat = *(rat_t *)data;
+ }
+ }
+ break;
+ case EXIF_UNDEFINED: {
+ uint8_t *values = (uint8_t *)malloc(count);
+ if (values == NULL) {
+ LOGE("No memory for undefined array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count);
+ p_info_data[numOfEntries].tag_entry.data._undefined = values;
+ }
+ }
+ break;
+ case EXIF_SLONG: {
+ if (count > 1) {
+ int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+ if (values == NULL) {
+ LOGE("No memory for signed long array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count * sizeof(int32_t));
+ p_info_data[numOfEntries].tag_entry.data._slongs = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._slong = *(int32_t *)data;
+ }
+ }
+ break;
+ case EXIF_SRATIONAL: {
+ if (count > 1) {
+ srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+ if (values == NULL) {
+ LOGE("No memory for signed rational array");
+ rc = -1;
+ } else {
+ memcpy(values, data, count * sizeof(srat_t));
+ p_info_data[numOfEntries].tag_entry.data._srats = values;
+ }
+ } else {
+ p_info_data[numOfEntries].tag_entry.data._srat = *(srat_t *)data;
+ }
+ }
+ break;
+ }
+
+ // Increase number of entries
+ p_exif_info->numOfEntries++;
+ return rc;
+}
+
+/** releaseExifEntry
+ *
+ * Arguments:
+ * @p_exif_data : Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *              0  -- success
+ *              non-zero failure code
+ *
+ * Description:
+ * Function to release an entry from exif data
+ *
+ **/
+int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data)
+{
+ switch (p_exif_data->tag_entry.type) {
+ case EXIF_BYTE: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._bytes != NULL) {
+ free(p_exif_data->tag_entry.data._bytes);
+ p_exif_data->tag_entry.data._bytes = NULL;
+ }
+ }
+ break;
+ case EXIF_ASCII: {
+ if (p_exif_data->tag_entry.data._ascii != NULL) {
+ free(p_exif_data->tag_entry.data._ascii);
+ p_exif_data->tag_entry.data._ascii = NULL;
+ }
+ }
+ break;
+ case EXIF_SHORT: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._shorts != NULL) {
+ free(p_exif_data->tag_entry.data._shorts);
+ p_exif_data->tag_entry.data._shorts = NULL;
+ }
+ }
+ break;
+ case EXIF_LONG: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._longs != NULL) {
+ free(p_exif_data->tag_entry.data._longs);
+ p_exif_data->tag_entry.data._longs = NULL;
+ }
+ }
+ break;
+ case EXIF_RATIONAL: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._rats != NULL) {
+ free(p_exif_data->tag_entry.data._rats);
+ p_exif_data->tag_entry.data._rats = NULL;
+ }
+ }
+ break;
+ case EXIF_UNDEFINED: {
+ if (p_exif_data->tag_entry.data._undefined != NULL) {
+ free(p_exif_data->tag_entry.data._undefined);
+ p_exif_data->tag_entry.data._undefined = NULL;
+ }
+ }
+ break;
+ case EXIF_SLONG: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._slongs != NULL) {
+ free(p_exif_data->tag_entry.data._slongs);
+ p_exif_data->tag_entry.data._slongs = NULL;
+ }
+ }
+ break;
+ case EXIF_SRATIONAL: {
+ if (p_exif_data->tag_entry.count > 1 &&
+ p_exif_data->tag_entry.data._srats != NULL) {
+ free(p_exif_data->tag_entry.data._srats);
+ p_exif_data->tag_entry.data._srats = NULL;
+ }
+ }
+ break;
+ } /*end of switch*/
+
+ return 0;
+}
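+
+/* Illustrative sketch: adding a single-value and a multi-value entry with
+ * addExifEntry(). For count > 1 the payload is duplicated on the heap, so
+ * releaseExifEntry() must later be called on each QEXIF_INFO_DATA entry to
+ * free those copies.
+ */
+void example_add_exif_entries(QOMX_EXIF_INFO *exif_info)
+{
+  rat_t f_number;
+  uint16_t iso_speeds[2] = { 100, 200 };
+
+  f_number.num = 200;   /* f/2.0 encoded as 200/100 */
+  f_number.denom = 100;
+  if (addExifEntry(exif_info, EXIFTAGID_F_NUMBER, EXIF_RATIONAL,
+    1, &f_number)) {
+    LOGE("Failed to add F number entry");
+  }
+
+  /* count > 1: addExifEntry() mallocs and copies both shorts */
+  if (addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT,
+    2, iso_speeds)) {
+    LOGE("Failed to add ISO speed entry");
+  }
+}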
+
+/** process_sensor_data:
+ *
+ * Arguments:
+ *    @p_sensor_params : ptr to sensor data
+ *    @exif_info : Exif info struct
+ *
+ * Return : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *
+ * Description:
+ * process sensor data
+ *
+ * Notes: this needs to be filled for the metadata
+ **/
+int process_sensor_data(cam_sensor_params_t *p_sensor_params,
+ QOMX_EXIF_INFO *exif_info)
+{
+ int rc = 0;
+ rat_t val_rat;
+
+ if (NULL == p_sensor_params) {
+ LOGE("Sensor params are null");
+ return 0;
+ }
+
+ LOGD("From metadata aperture = %f ",
+ p_sensor_params->aperture_value );
+
+ if (p_sensor_params->aperture_value >= 1.0) {
+ double apex_value;
+ apex_value = (double)2.0 * log(p_sensor_params->aperture_value) / log(2.0);
+ val_rat.num = (uint32_t)(apex_value * 100);
+ val_rat.denom = 100;
+ rc = addExifEntry(exif_info, EXIFTAGID_APERTURE, EXIF_RATIONAL, 1, &val_rat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ val_rat.num = (uint32_t)(p_sensor_params->aperture_value * 100);
+ val_rat.denom = 100;
+ rc = addExifEntry(exif_info, EXIFTAGID_F_NUMBER, EXIF_RATIONAL, 1, &val_rat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+ }
+
+ /*Flash*/
+ short val_short = 0;
+ int flash_mode_exif, flash_fired;
+ if (p_sensor_params->flash_state == CAM_FLASH_STATE_FIRED) {
+ flash_fired = 1;
+ } else {
+ flash_fired = 0;
+ }
+ LOGD("Flash mode %d flash state %d",
+ p_sensor_params->flash_mode, p_sensor_params->flash_state);
+
+ switch(p_sensor_params->flash_mode) {
+ case CAM_FLASH_MODE_OFF:
+ flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_OFF;
+ break;
+ case CAM_FLASH_MODE_ON:
+ flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_ON;
+ break;
+ case CAM_FLASH_MODE_AUTO:
+ flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_AUTO;
+ break;
+ default:
+ flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_AUTO;
+ LOGE(": Unsupported flash mode");
+ }
+ val_short = (short)(flash_fired | (flash_mode_exif << 3));
+
+ rc = addExifEntry(exif_info, EXIFTAGID_FLASH, EXIF_SHORT, 1, &val_short);
+ if (rc) {
+ LOGE(": Error adding flash exif entry");
+ }
+ /* Sensing Method */
+ val_short = (short) p_sensor_params->sensing_method;
+ rc = addExifEntry(exif_info, EXIFTAGID_SENSING_METHOD, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding flash Exif Entry");
+ }
+
+ /* Focal Length in 35 MM Film */
+ val_short = (short)
+ ((p_sensor_params->focal_length * p_sensor_params->crop_factor) + 0.5f);
+ rc = addExifEntry(exif_info, EXIFTAGID_FOCAL_LENGTH_35MM, EXIF_SHORT,
+ 1, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /* F Number */
+ val_rat.num = (uint32_t)(p_sensor_params->f_number * 100);
+ val_rat.denom = 100;
+ rc = addExifEntry(exif_info, EXIFTAGTYPE_F_NUMBER, EXIF_RATIONAL, 1, &val_rat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+ return rc;
+}
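+
+/* Worked example (sketch) of the APEX conversion used above:
+ * ApertureValue (Av) = 2 * log2(F-number), written as an unsigned rational
+ * scaled by 100, e.g. f/2.0 -> Av = 2.0 -> 200/100 and
+ * f/1.8 -> Av ~= 1.70 -> 169/100. Assumes the same math helpers as above.
+ */
+void example_aperture_to_apex(double f_number, rat_t *p_val)
+{
+  double apex_value = 2.0 * log(f_number) / log(2.0);
+
+  p_val->num = (uint32_t)(apex_value * 100);
+  p_val->denom = 100;
+}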
+
+
+/** process_3a_data:
+ *
+ * Arguments:
+ * @p_3a_params : ptr to 3a data
+ * @exif_info : Exif info struct
+ *
+ * Return : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *
+ * Description:
+ * process 3a data
+ *
+ * Notes: this needs to be filled for the metadata
+ **/
+int process_3a_data(cam_3a_params_t *p_3a_params, QOMX_EXIF_INFO *exif_info)
+{
+ int rc = 0;
+ srat_t val_srat;
+ rat_t val_rat;
+ double shutter_speed_value;
+
+ if (NULL == p_3a_params) {
+ LOGE("3A params are null");
+ return 0;
+ }
+
+ LOGD("exp_time %f, iso_value %d, wb_mode %d",
+ p_3a_params->exp_time, p_3a_params->iso_value, p_3a_params->wb_mode);
+
+ /* Exposure time */
+ if (p_3a_params->exp_time <= 0.0f) {
+ val_rat.num = 0;
+ val_rat.denom = 0;
+ } else if (p_3a_params->exp_time < 1.0f) {
+ val_rat.num = 1;
+ val_rat.denom = ROUND(1.0/p_3a_params->exp_time);
+ } else {
+ val_rat.num = ROUND(p_3a_params->exp_time);
+ val_rat.denom = 1;
+ }
+ LOGD("numer %d denom %d %zd", val_rat.num, val_rat.denom,
+ sizeof(val_rat) / (8));
+
+ rc = addExifEntry(exif_info, EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL,
+ (sizeof(val_rat)/(8)), &val_rat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry Exposure time");
+ }
+
+ /* Shutter Speed*/
+ if (p_3a_params->exp_time > 0) {
+ shutter_speed_value = log10(1/p_3a_params->exp_time)/log10(2);
+ val_srat.num = (int32_t)(shutter_speed_value * 1000);
+ val_srat.denom = 1000;
+ } else {
+ val_srat.num = 0;
+ val_srat.denom = 0;
+ }
+ rc = addExifEntry(exif_info, EXIFTAGID_SHUTTER_SPEED, EXIF_SRATIONAL,
+ (sizeof(val_srat)/(8)), &val_srat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /*ISO*/
+ short val_short;
+ val_short = (short)p_3a_params->iso_value;
+ rc = addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /*WB mode*/
+ if (p_3a_params->wb_mode == CAM_WB_MODE_AUTO)
+ val_short = 0;
+ else
+ val_short = 1;
+ rc = addExifEntry(exif_info, EXIFTAGID_WHITE_BALANCE, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /* Metering Mode */
+ val_short = (short) p_3a_params->metering_mode;
+ rc = addExifEntry(exif_info,EXIFTAGID_METERING_MODE, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /*Exposure Program*/
+ val_short = (short) p_3a_params->exposure_program;
+ rc = addExifEntry(exif_info,EXIFTAGID_EXPOSURE_PROGRAM, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /*Exposure Mode */
+ val_short = (short) p_3a_params->exposure_mode;
+ rc = addExifEntry(exif_info,EXIFTAGID_EXPOSURE_MODE, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ /*Scenetype*/
+ uint8_t val_undef;
+ val_undef = (uint8_t) p_3a_params->scenetype;
+ rc = addExifEntry(exif_info,EXIFTAGID_SCENE_TYPE, EXIF_UNDEFINED,
+ sizeof(val_undef), &val_undef);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ LOGD("brightness %f",
+ p_3a_params->brightness);
+
+ /* Brightness Value*/
+ val_srat.num = (int32_t) (p_3a_params->brightness * 100.0f);
+ val_srat.denom = 100;
+ rc = addExifEntry(exif_info,EXIFTAGID_BRIGHTNESS, EXIF_SRATIONAL,
+ (sizeof(val_srat)/(8)), &val_srat);
+ if (rc) {
+ LOGE(": Error adding Exif Entry");
+ }
+
+ return rc;
+}
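+
+/* Worked example (sketch) of the exposure-time encoding used above, with
+ * the same ROUND() helper: sub-second exposures become 1/N (0.0333 s ->
+ * 1/30), longer exposures become N/1, and ShutterSpeedValue (Tv) =
+ * log2(1/t), e.g. t = 1/30 s -> Tv ~= 4.91 -> 4906/1000.
+ */
+void example_exposure_to_exif(float exp_time, rat_t *p_exp, srat_t *p_tv)
+{
+  if (exp_time <= 0.0f) {
+    p_exp->num = 0;
+    p_exp->denom = 0;
+    p_tv->num = 0;
+    p_tv->denom = 0;
+    return;
+  }
+
+  if (exp_time < 1.0f) {
+    p_exp->num = 1;
+    p_exp->denom = ROUND(1.0 / exp_time);
+  } else {
+    p_exp->num = ROUND(exp_time);
+    p_exp->denom = 1;
+  }
+
+  /* APEX shutter speed, scaled by 1000 as a signed rational */
+  p_tv->num = (int32_t)((log10(1 / exp_time) / log10(2)) * 1000);
+  p_tv->denom = 1000;
+}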
+
+/** process_meta_data
+ *
+ * Arguments:
+ * @p_meta : ptr to metadata
+ * @exif_info: Exif info struct
+ *    @p_cam_exif_params: cached exif params
+ *    @hal_version: camera HAL version
+ *
+ * Return : int32_t type of status
+ * NO_ERROR -- success
+ *              non-zero failure code
+ *
+ * Description:
+ * Extract exif data from the metadata
+ **/
+int process_meta_data(metadata_buffer_t *p_meta, QOMX_EXIF_INFO *exif_info,
+ mm_jpeg_exif_params_t *p_cam_exif_params, cam_hal_version_t hal_version)
+{
+ int rc = 0;
+ cam_sensor_params_t p_sensor_params;
+ cam_3a_params_t p_3a_params;
+ bool is_3a_meta_valid = false, is_sensor_meta_valid = false;
+
+ memset(&p_3a_params, 0, sizeof(cam_3a_params_t));
+ memset(&p_sensor_params, 0, sizeof(cam_sensor_params_t));
+
+ if (p_meta) {
+ /* for HAL V1*/
+ if (hal_version == CAM_HAL_V1) {
+
+ IF_META_AVAILABLE(cam_3a_params_t, l_3a_params, CAM_INTF_META_AEC_INFO,
+ p_meta) {
+ p_3a_params = *l_3a_params;
+ is_3a_meta_valid = true;
+ }
+
+ IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, p_meta) {
+ p_3a_params.wb_mode = *wb_mode;
+ }
+
+ IF_META_AVAILABLE(cam_sensor_params_t, l_sensor_params,
+ CAM_INTF_META_SENSOR_INFO, p_meta) {
+ p_sensor_params = *l_sensor_params;
+ is_sensor_meta_valid = true;
+ }
+ } else {
+ /* HAL V3 */
+ IF_META_AVAILABLE(int32_t, iso, CAM_INTF_META_SENSOR_SENSITIVITY, p_meta) {
+ p_3a_params.iso_value= *iso;
+ } else {
+ LOGE("Cannot extract Iso value");
+ }
+
+ IF_META_AVAILABLE(int64_t, sensor_exposure_time,
+ CAM_INTF_META_SENSOR_EXPOSURE_TIME, p_meta) {
+ p_3a_params.exp_time =
+ (float)((double)(*sensor_exposure_time) / 1000000000.0);
+ } else {
+ LOGE("Cannot extract Exp time value");
+ }
+
+ IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, p_meta) {
+ p_3a_params.wb_mode = *wb_mode;
+ } else {
+ LOGE("Cannot extract white balance mode");
+ }
+
+ /* Process sensor data */
+ IF_META_AVAILABLE(float, aperture, CAM_INTF_META_LENS_APERTURE, p_meta) {
+ p_sensor_params.aperture_value = *aperture;
+ } else {
+ LOGE("Cannot extract Aperture value");
+ }
+
+ IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, p_meta) {
+ p_sensor_params.flash_mode = *flash_mode;
+ } else {
+ LOGE("Cannot extract flash mode value");
+ }
+
+ IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, p_meta) {
+ p_sensor_params.flash_state = (cam_flash_state_t) *flash_state;
+ } else {
+ LOGE("Cannot extract flash state value");
+ }
+ }
+ }
+
+ /* take the cached values if meta is invalid */
+ if ((!is_3a_meta_valid) && (hal_version == CAM_HAL_V1)) {
+ p_3a_params = p_cam_exif_params->cam_3a_params;
+ LOGW("Warning using cached values for 3a");
+ }
+
+ if ((!is_sensor_meta_valid) && (hal_version == CAM_HAL_V1)) {
+ p_sensor_params = p_cam_exif_params->sensor_params;
+ LOGW("Warning using cached values for sensor");
+ }
+
+ if ((hal_version != CAM_HAL_V1) || (p_sensor_params.sens_type != CAM_SENSOR_YUV)) {
+ rc = process_3a_data(&p_3a_params, exif_info);
+ if (rc) {
+ LOGE("Failed to add 3a exif params");
+ }
+ }
+
+ rc = process_sensor_data(&p_sensor_params, exif_info);
+ if (rc) {
+ LOGE("Failed to extract sensor params");
+ }
+
+ if (p_meta) {
+ short val_short = 0;
+ cam_asd_decision_t *scene_info = NULL;
+
+ IF_META_AVAILABLE(cam_asd_decision_t, scene_cap_type,
+ CAM_INTF_META_ASD_SCENE_INFO, p_meta) {
+ scene_info = (cam_asd_decision_t*)scene_cap_type;
+ val_short = (short) scene_info->detected_scene;
+ }
+
+ rc = addExifEntry(exif_info, EXIFTAGID_SCENE_CAPTURE_TYPE, EXIF_SHORT,
+ sizeof(val_short)/2, &val_short);
+ if (rc) {
+ LOGE(": Error adding ASD Exif Entry");
+ }
+ } else {
+ LOGE(": Error adding ASD Exif Entry, no meta");
+ }
+ return rc;
+}
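+
+/* Illustrative sketch: filling the EXIF info from a HAL1 metadata buffer.
+ * For CAM_HAL_V1 the cached 3A/sensor values in p_exif_params are used as
+ * a fallback whenever the corresponding metadata entries are missing.
+ */
+int example_fill_exif_from_meta(metadata_buffer_t *p_meta,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_exif_params)
+{
+  return process_meta_data(p_meta, exif_info, p_exif_params, CAM_HAL_V1);
+}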
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
new file mode 100644
index 0000000..5655c49
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
@@ -0,0 +1,409 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <stdlib.h>
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_mpo.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+static mm_jpeg_obj* g_jpeg_obj = NULL;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+volatile uint32_t gKpiDebugLevel = 0;
+
+/** mm_jpeg_util_generate_handler:
+ *
+ * Arguments:
+ * @index: client index
+ *
+ * Return:
+ * handle value
+ *
+ * Description:
+ * utility function to generate handler
+ *
+ **/
+uint32_t mm_jpeg_util_generate_handler(uint8_t index)
+{
+ uint32_t handler = 0;
+ pthread_mutex_lock(&g_handler_lock);
+ g_handler_history_count++;
+ if (0 == g_handler_history_count) {
+ g_handler_history_count++;
+ }
+ handler = g_handler_history_count;
+ handler = (handler<<8) | index;
+ pthread_mutex_unlock(&g_handler_lock);
+ return handler;
+}
+
+/** mm_jpeg_util_get_index_by_handler:
+ *
+ * Arguments:
+ * @handler: handle value
+ *
+ * Return:
+ * client index
+ *
+ * Description:
+ * get client index
+ *
+ **/
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler)
+{
+ return (handler & 0x000000ff);
+}
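+
+/* Illustrative sketch: round trip through the two helpers above. The upper
+ * bits of a handle carry a non-zero, monotonically increasing history count
+ * and the low byte carries the client index, so the index can always be
+ * recovered with a mask.
+ */
+void example_handler_roundtrip(void)
+{
+  uint8_t index = 3;
+  uint32_t handle = mm_jpeg_util_generate_handler(index);
+
+  if (mm_jpeg_util_get_index_by_handler(handle) != index) {
+    LOGE("unexpected index in handle 0x%x", handle);
+  }
+}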
+
+/** mm_jpeg_intf_start_job:
+ *
+ * Arguments:
+ *    @job: jpeg job object
+ *    @job_id: job id
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * start the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+ int32_t rc = -1;
+
+ if (NULL == job ||
+ NULL == job_id) {
+ LOGE("invalid parameters for job or jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (NULL == g_jpeg_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+ }
+ rc = mm_jpeg_start_job(g_jpeg_obj, job, job_id);
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+}
+
+/** mm_jpeg_intf_create_session:
+ *
+ * Arguments:
+ * @client_hdl: client handle
+ * @p_params: encode parameters
+ * @p_session_id: session id
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Create new jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_create_session(uint32_t client_hdl,
+ mm_jpeg_encode_params_t *p_params,
+ uint32_t *p_session_id)
+{
+ int32_t rc = -1;
+
+ if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+    LOGE("invalid client_hdl, p_params or p_session_id");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (NULL == g_jpeg_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpeg_create_session(g_jpeg_obj, client_hdl, p_params, p_session_id);
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+}
+
+/** mm_jpeg_intf_destroy_session:
+ *
+ * Arguments:
+ * @session_id: session id
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Destroy jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_destroy_session(uint32_t session_id)
+{
+ int32_t rc = -1;
+
+ if (0 == session_id) {
+    LOGE("invalid session_id");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (NULL == g_jpeg_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpeg_destroy_session_by_id(g_jpeg_obj, session_id);
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+}
+
+/** mm_jpeg_intf_abort_job:
+ *
+ * Arguments:
+ * @jobId: job id
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_abort_job(uint32_t job_id)
+{
+ int32_t rc = -1;
+
+ if (0 == job_id) {
+ LOGE("invalid jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (NULL == g_jpeg_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpeg_abort_job(g_jpeg_obj, job_id);
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+}
+
+/** mm_jpeg_intf_close:
+ *
+ * Arguments:
+ * @client_hdl: client handle
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ *    Close the jpeg client
+ *
+ **/
+static int32_t mm_jpeg_intf_close(uint32_t client_hdl)
+{
+ int32_t rc = -1;
+
+ if (0 == client_hdl) {
+ LOGE("invalid client_hdl");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ if (NULL == g_jpeg_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpeg_close(g_jpeg_obj, client_hdl);
+ g_jpeg_obj->num_clients--;
+ if(0 == rc) {
+ if (0 == g_jpeg_obj->num_clients) {
+ /* No client, close jpeg internally */
+ rc = mm_jpeg_deinit(g_jpeg_obj);
+ free(g_jpeg_obj);
+ g_jpeg_obj = NULL;
+ }
+ }
+
+ pthread_mutex_unlock(&g_intf_lock);
+ return rc;
+}
+
+/** mm_jpeg_intf_compose_mpo:
+ *
+ * Arguments:
+ * @mpo_info : MPO Information
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Compose MPO image from jpeg images
+ *
+ **/
+static int32_t mm_jpeg_intf_compose_mpo(mm_jpeg_mpo_info_t *mpo_info)
+{
+ int32_t rc = -1;
+ if (!mpo_info) {
+ LOGE("Invalid input");
+ return rc;
+ }
+
+ if (mpo_info->num_of_images > MM_JPEG_MAX_MPO_IMAGES) {
+ LOGE("Num of images exceeds max supported images in MPO");
+ return rc;
+ }
+  //Call MPO composition
+ rc = mm_jpeg_mpo_compose(mpo_info);
+
+ return rc;
+}
+
+/** jpeg_open:
+ *
+ * Arguments:
+ * @ops: ops table pointer
+ * @mpo_ops: mpo ops table ptr
+ * @picture_size: Max available dim
+ * @jpeg_metadata: Jpeg meta data
+ *
+ * Return:
+ *       0 on failure, valid client handle otherwise
+ *
+ * Description:
+ * Open a jpeg client. Jpeg meta data will be cached
+ *    but memory management has to be done by the client.
+ *
+ **/
+uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
+ mm_dimension picture_size,
+ cam_jpeg_metadata_t *jpeg_metadata)
+{
+ int32_t rc = 0;
+ uint32_t clnt_hdl = 0;
+ mm_jpeg_obj* jpeg_obj = NULL;
+ char prop[PROPERTY_VALUE_MAX];
+
+ property_get("persist.camera.kpi.debug", prop, "0");
+ gKpiDebugLevel = atoi(prop);
+
+ pthread_mutex_lock(&g_intf_lock);
+ /* first time open */
+ if(NULL == g_jpeg_obj) {
+ jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+ if(NULL == jpeg_obj) {
+ LOGE("no mem");
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* initialize jpeg obj */
+ memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+
+ /* by default reuse reproc source buffer if available */
+ if (mpo_ops == NULL) {
+ jpeg_obj->reuse_reproc_buffer = 1;
+ } else {
+ jpeg_obj->reuse_reproc_buffer = 0;
+ }
+ LOGH("reuse_reproc_buffer %d ",
+ jpeg_obj->reuse_reproc_buffer);
+
+ /* used for work buf calculation */
+ jpeg_obj->max_pic_w = picture_size.w;
+ jpeg_obj->max_pic_h = picture_size.h;
+
+ /*Cache OTP Data for the session*/
+ if (NULL != jpeg_metadata) {
+ jpeg_obj->jpeg_metadata = jpeg_metadata;
+ }
+
+ rc = mm_jpeg_init(jpeg_obj);
+ if(0 != rc) {
+ LOGE("mm_jpeg_init err = %d", rc);
+ free(jpeg_obj);
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* remember in global variable */
+ g_jpeg_obj = jpeg_obj;
+ }
+
+ /* open new client */
+ clnt_hdl = mm_jpeg_new_client(g_jpeg_obj);
+ if (clnt_hdl > 0) {
+ /* valid client */
+ if (NULL != ops) {
+ /* fill in ops tbl if ptr not NULL */
+ ops->start_job = mm_jpeg_intf_start_job;
+ ops->abort_job = mm_jpeg_intf_abort_job;
+ ops->create_session = mm_jpeg_intf_create_session;
+ ops->destroy_session = mm_jpeg_intf_destroy_session;
+ ops->close = mm_jpeg_intf_close;
+ }
+ if (NULL != mpo_ops) {
+ mpo_ops->compose_mpo = mm_jpeg_intf_compose_mpo;
+ }
+ } else {
+ /* failed new client */
+ LOGE("mm_jpeg_new_client failed");
+
+ if (0 == g_jpeg_obj->num_clients) {
+ /* no client, close jpeg */
+ mm_jpeg_deinit(g_jpeg_obj);
+ free(g_jpeg_obj);
+ g_jpeg_obj = NULL;
+ }
+ }
+
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+}
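+
+/* Illustrative sketch: a minimal open/close sequence for this interface.
+ * Encode parameters and job setup are omitted; the point is the ops-table
+ * wiring done by jpeg_open() and the matching close() call.
+ */
+void example_jpeg_client_lifecycle(mm_dimension max_pic_dim)
+{
+  mm_jpeg_ops_t ops;
+  uint32_t handle;
+
+  memset(&ops, 0, sizeof(ops));
+  /* no MPO ops and no cached jpeg metadata in this minimal example */
+  handle = jpeg_open(&ops, NULL, max_pic_dim, NULL);
+  if (0 == handle) {
+    LOGE("jpeg_open failed");
+    return;
+  }
+
+  /* ... create a session with ops.create_session(), submit work with
+   * ops.start_job() and tear it down with ops.destroy_session() ... */
+
+  if (ops.close(handle)) {
+    LOGE("close failed");
+  }
+}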
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
new file mode 100644
index 0000000..34702e7
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
@@ -0,0 +1,206 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <errno.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <linux/msm_ion.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+
+// JPEG dependencies
+#include "mm_jpeg_ionbuf.h"
+
+/** buffer_allocate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * buffer address
+ *
+ * Description:
+ * allocates ION buffer
+ *
+ **/
+void *buffer_allocate(buffer_t *p_buffer, int cached)
+{
+ void *l_buffer = NULL;
+
+ int lrc = 0;
+ struct ion_handle_data lhandle_data;
+
+ p_buffer->alloc.len = p_buffer->size;
+ p_buffer->alloc.align = 4096;
+ p_buffer->alloc.flags = (cached) ? ION_FLAG_CACHED : 0;
+ p_buffer->alloc.heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+
+ p_buffer->ion_fd = open("/dev/ion", O_RDONLY);
+ if(p_buffer->ion_fd < 0) {
+ LOGE("Ion open failed");
+ goto ION_ALLOC_FAILED;
+ }
+
+ /* Make it page size aligned */
+ p_buffer->alloc.len = (p_buffer->alloc.len + 4095U) & (~4095U);
+ lrc = ioctl(p_buffer->ion_fd, ION_IOC_ALLOC, &p_buffer->alloc);
+ if (lrc < 0) {
+ LOGE("ION allocation failed len %zu",
+ p_buffer->alloc.len);
+ goto ION_ALLOC_FAILED;
+ }
+
+ p_buffer->ion_info_fd.handle = p_buffer->alloc.handle;
+ lrc = ioctl(p_buffer->ion_fd, ION_IOC_SHARE,
+ &p_buffer->ion_info_fd);
+ if (lrc < 0) {
+ LOGE("ION map failed %s", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+
+ p_buffer->p_pmem_fd = p_buffer->ion_info_fd.fd;
+
+ l_buffer = mmap(NULL, p_buffer->alloc.len, PROT_READ | PROT_WRITE,
+ MAP_SHARED,p_buffer->p_pmem_fd, 0);
+
+ if (l_buffer == MAP_FAILED) {
+ LOGE("ION_MMAP_FAILED: %s (%d)",
+ strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+
+ return l_buffer;
+
+ION_MAP_FAILED:
+ lhandle_data.handle = p_buffer->ion_info_fd.handle;
+ ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+ return NULL;
+ION_ALLOC_FAILED:
+ return NULL;
+
+}
+
+/** buffer_deallocate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ *     error value
+ *
+ * Description:
+ * deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer)
+{
+ int lrc = 0;
+ size_t lsize = (p_buffer->size + 4095U) & (~4095U);
+
+ struct ion_handle_data lhandle_data;
+ lrc = munmap(p_buffer->addr, lsize);
+
+ close(p_buffer->ion_info_fd.fd);
+
+ lhandle_data.handle = p_buffer->ion_info_fd.handle;
+ ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+
+ close(p_buffer->ion_fd);
+ return lrc;
+}
+
+/** buffer_invalidate:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * error val
+ *
+ * Description:
+ * Invalidates the cached buffer
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer)
+{
+ int lrc = 0;
+ struct ion_flush_data cache_inv_data;
+ struct ion_custom_data custom_data;
+
+ memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+ memset(&custom_data, 0, sizeof(custom_data));
+ cache_inv_data.vaddr = p_buffer->addr;
+ cache_inv_data.fd = p_buffer->ion_info_fd.fd;
+ cache_inv_data.handle = p_buffer->ion_info_fd.handle;
+ cache_inv_data.length = (unsigned int)p_buffer->size;
+ custom_data.cmd = (unsigned int)ION_IOC_INV_CACHES;
+ custom_data.arg = (unsigned long)&cache_inv_data;
+
+ lrc = ioctl(p_buffer->ion_fd, ION_IOC_CUSTOM, &custom_data);
+ if (lrc < 0)
+ LOGW("Cache Invalidate failed: %s\n", strerror(errno));
+
+ return lrc;
+}
+
+/** buffer_clean:
+ *
+ * Arguments:
+ * @p_buffer: ION buffer
+ *
+ * Return:
+ * error val
+ *
+ * Description:
+ * Clean the cached buffer
+ *
+ **/
+int buffer_clean(buffer_t *p_buffer)
+{
+ int lrc = 0;
+ struct ion_flush_data cache_clean_data;
+ struct ion_custom_data custom_data;
+
+ memset(&cache_clean_data, 0, sizeof(cache_clean_data));
+ memset(&custom_data, 0, sizeof(custom_data));
+ cache_clean_data.vaddr = p_buffer->addr;
+ cache_clean_data.fd = p_buffer->ion_info_fd.fd;
+ cache_clean_data.handle = p_buffer->ion_info_fd.handle;
+ cache_clean_data.length = (unsigned int)p_buffer->size;
+ custom_data.cmd = (unsigned int)ION_IOC_CLEAN_CACHES;
+ custom_data.arg = (unsigned long)&cache_clean_data;
+
+ lrc = ioctl(p_buffer->ion_fd, ION_IOC_CUSTOM, &custom_data);
+ if (lrc < 0)
+ LOGW("Cache clean failed: %s\n", strerror(errno));
+
+ return lrc;
+}
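+
+/* Illustrative sketch: typical lifetime of a cached ION buffer using the
+ * helpers above. Field usage follows this file; the caller must set size
+ * before buffer_allocate() and keep the returned address in addr.
+ */
+int example_ion_buffer_usage(size_t len)
+{
+  buffer_t buf;
+
+  memset(&buf, 0, sizeof(buf));
+  buf.size = len;
+  buf.addr = buffer_allocate(&buf, 1 /* cached */);
+  if (NULL == buf.addr) {
+    LOGE("buffer_allocate failed");
+    return -1;
+  }
+
+  /* CPU writes go through the cache, so clean before handing the buffer
+   * to hardware, and invalidate before reading data written by it */
+  buffer_clean(&buf);
+  buffer_invalidate(&buf);
+
+  return buffer_deallocate(&buf);
+}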
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c
new file mode 100644
index 0000000..fb9c222
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c
@@ -0,0 +1,414 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_mpo.h"
+
+#define M_APP0 0xe0
+#define M_APP1 0xe1
+#define M_APP2 0xe2
+#define M_EOI 0xd9
+#define M_SOI 0xd8
+
+/** READ_LONG:
+ * @b: Buffer start addr
+ * @o: Buffer offset to start reading
+ *
+ * Read long value from the specified buff addr at given offset
+ **/
+#define READ_LONG(b, o) \
+ (uint32_t)(((uint32_t)b[o] << 24) + \
+ ((uint32_t)b[o+1] << 16) + \
+ ((uint32_t)b[o+2] << 8) + \
+ ((uint32_t)b[o+3]))
+
+/** READ_LONG_LITTLE:
+ * @b: Buffer start addr
+ * @o: Buffer offset to start reading
+ *
+ * Read long value from the specified buff addr at given offset
+ * in Little Endian
+ **/
+#define READ_LONG_LITTLE(b, o) \
+ (uint32_t)(((uint32_t)b[o + 3] << 24) + \
+ ((uint32_t) b[o + 2] << 16) + \
+ ((uint32_t) b[o + 1] << 8) + \
+ (uint32_t) b[o]);
+
+/** READ_SHORT:
+ * @b: Buffer start addr
+ * @o: Buffer offset to start reading
+ *
+ * Read short value from the specified buff addr at given
+ * offset
+ **/
+#define READ_SHORT(b, o) \
+ (uint16_t) (((uint16_t)b[o] << 8) + \
+ (uint16_t) b[o + 1]);
+
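+/* Illustrative sketch: the two long-read helpers above differ only in byte
+ * order. For the bytes { 0x01, 0x02, 0x03, 0x04 } at offset 0, READ_LONG
+ * yields 0x01020304 (big endian) while READ_LONG_LITTLE yields 0x04030201.
+ */
+void example_read_long_endianness(void)
+{
+  uint8_t bytes[4] = { 0x01, 0x02, 0x03, 0x04 };
+  uint32_t be_val = READ_LONG(bytes, 0);
+  uint32_t le_val = READ_LONG_LITTLE(bytes, 0);
+
+  LOGD("big endian 0x%x little endian 0x%x", be_val, le_val);
+}
+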
+/*Mutex to serialize MPO composition*/
+static pthread_mutex_t g_mpo_lock = PTHREAD_MUTEX_INITIALIZER;
+
+/** mm_jpeg_mpo_write_long_little_endian
+ *
+ * Arguments:
+ * @buffer_addr: image start addr
+ * @buff_offset: offset in the buffer
+ * @buffer_size: Size of the buffer
+ * @value: Value to write
+ * @overflow : Overflow flag
+ *
+ * Return:
+ * None
+ *
+ * Description:
+ * Write value at the given offset
+ *
+ **/
+void mm_jpeg_mpo_write_long_little_endian(uint8_t *buff_addr, uint32_t buff_offset,
+ uint32_t buffer_size, int value, uint8_t *overflow)
+{
+ if (buff_offset + 3 >= buffer_size) {
+ *overflow = TRUE;
+ }
+
+ if (!(*overflow)) {
+ buff_addr[buff_offset + 3] = (uint8_t)((value >> 24) & 0xFF);
+ buff_addr[buff_offset + 2] = (uint8_t)((value >> 16) & 0xFF);
+ buff_addr[buff_offset + 1] = (uint8_t)((value >> 8) & 0xFF);
+ buff_addr[buff_offset] = (uint8_t)(value & 0xFF);
+ }
+}
+
+/** mm_jpeg_mpo_write_long
+ *
+ * Arguments:
+ * @buffer_addr: image start addr
+ * @buff_offset: offset in the buffer
+ * @buffer_size: Size of the buffer
+ * @value: Value to write
+ * @overflow : Overflow flag
+ *
+ * Return:
+ * None
+ *
+ * Description:
+ * Write value at the given offset
+ *
+ **/
+void mm_jpeg_mpo_write_long(uint8_t *buff_addr, uint32_t buff_offset,
+ uint32_t buffer_size, int value, uint8_t *overflow)
+{
+ if ((buff_offset + 3) >= buffer_size) {
+ *overflow = TRUE;
+ }
+
+ if (!(*overflow)) {
+ buff_addr[buff_offset] = (uint8_t)((value >> 24) & 0xFF);
+ buff_addr[buff_offset+1] = (uint8_t)((value >> 16) & 0xFF);
+ buff_addr[buff_offset+2] = (uint8_t)((value >> 8) & 0xFF);
+ buff_addr[buff_offset+3] = (uint8_t)(value & 0xFF);
+ }
+}
+
+/** mm_jpeg_mpo_get_app_marker
+ *
+ * Arguments:
+ * @buffer_addr: Jpeg image start addr
+ * @buffer_size: Size of the Buffer
+ * @app_marker: app_marker to find
+ *
+ * Return:
+ * Start offset of the specified app marker
+ *
+ * Description:
+ * Gets the start offset of the given app marker
+ *
+ **/
+uint8_t *mm_jpeg_mpo_get_app_marker(uint8_t *buffer_addr, int buffer_size,
+ int app_marker)
+{
+ int32_t byte;
+ uint8_t *p_current_addr = NULL, *p_start_offset = NULL;
+ uint16_t app_marker_size = 0;
+
+ p_current_addr = buffer_addr;
+ do {
+ do {
+ byte = *(p_current_addr);
+ p_current_addr++;
+ }
+ while ((byte != 0xFF) &&
+ (p_current_addr < (buffer_addr + (buffer_size - 1))));
+
+ //If 0xFF is not found at all, break
+ if (byte != 0xFF) {
+ LOGD("0xFF not found");
+ break;
+ }
+
+ //Read the next byte after 0xFF
+ byte = *(p_current_addr);
+ LOGD("Byte %x", byte);
+ if (byte == app_marker) {
+ LOGD("Byte %x", byte);
+ p_start_offset = ++p_current_addr;
+ break;
+ } else if (byte != M_SOI) {
+ app_marker_size = READ_SHORT(p_current_addr, 1);
+ LOGD("size %d", app_marker_size);
+ p_current_addr += app_marker_size;
+ }
+ }
+ while ((byte != M_EOI) &&
+ (p_current_addr < (buffer_addr + (buffer_size - 1))));
+
+ return p_start_offset;
+}
+
+/** mm_jpeg_mpo_get_mp_header
+ *
+ * Arguments:
+ * @app2_marker: app2_marker start offset
+ *
+ * Return:
+ * Start offset of the MP header
+ *
+ * Description:
+ * Get the start offset of the MP header (before the MP
+ * Endian field). All offsets in the MP header need to be
+ * specified wrt this start offset.
+ *
+ **/
+uint8_t *mm_jpeg_mpo_get_mp_header(uint8_t *app2_start_offset)
+{
+ uint8_t *mp_headr_start_offset = NULL;
+
+ if (app2_start_offset != NULL) {
+ mp_headr_start_offset = app2_start_offset + MP_APP2_FIELD_LENGTH_BYTES +
+ MP_FORMAT_IDENTIFIER_BYTES;
+ }
+
+ return mp_headr_start_offset;
+}
+
+/** mm_jpeg_mpo_update_header
+ *
+ * Arguments:
+ * @mpo_info: MPO Info
+ *
+ * Return:
+ * 0 - Success
+ * -1 - otherwise
+ *
+ * Description:
+ * Update the MP Index IFD of the first image with info
+ *  about all other images.
+ *
+ **/
+int mm_jpeg_mpo_update_header(mm_jpeg_mpo_info_t *mpo_info)
+{
+ uint8_t *app2_start_off_addr = NULL, *mp_headr_start_off_addr = NULL;
+ uint32_t mp_index_ifd_offset = 0, current_offset = 0, mp_entry_val_offset = 0;
+ uint8_t *aux_start_addr = NULL;
+ uint8_t overflow_flag = 0;
+ int i = 0, rc = -1;
+ uint32_t endianess = MPO_LITTLE_ENDIAN, offset_to_nxt_ifd = 8;
+ uint16_t ifd_tag_count = 0;
+
+ //Get the addr of the App Marker
+ app2_start_off_addr = mm_jpeg_mpo_get_app_marker(
+ mpo_info->output_buff.buf_vaddr, mpo_info->primary_image.buf_filled_len, M_APP2);
+ if (!app2_start_off_addr) {
+ LOGE("Cannot find App2 marker. MPO composition failed" );
+ return rc;
+ }
+ LOGD("app2_start_off_addr %p = %x",
+ app2_start_off_addr, *app2_start_off_addr);
+
+ //Get the addr of the MP Headr start offset.
+ //All offsets in the MP header are wrt to this addr
+ mp_headr_start_off_addr = mm_jpeg_mpo_get_mp_header(app2_start_off_addr);
+ if (!mp_headr_start_off_addr) {
+ LOGE("mp headr start offset is NULL. MPO composition failed" );
+ return rc;
+ }
+ LOGD("mp_headr_start_off_addr %x",
+ *mp_headr_start_off_addr);
+
+ current_offset = mp_headr_start_off_addr - mpo_info->output_buff.buf_vaddr;
+
+ endianess = READ_LONG(mpo_info->output_buff.buf_vaddr, current_offset);
+ LOGD("Endianess %d", endianess);
+
+ //Add offset to first ifd
+ current_offset += MP_ENDIAN_BYTES;
+
+ //Read the value to get MP Index IFD.
+ if (endianess == MPO_LITTLE_ENDIAN) {
+ offset_to_nxt_ifd = READ_LONG_LITTLE(mpo_info->output_buff.buf_vaddr,
+ current_offset);
+ } else {
+ offset_to_nxt_ifd = READ_LONG(mpo_info->output_buff.buf_vaddr,
+ current_offset);
+ }
+ LOGD("offset_to_nxt_ifd %d", offset_to_nxt_ifd);
+
+ current_offset = ((mp_headr_start_off_addr + offset_to_nxt_ifd) -
+ mpo_info->output_buff.buf_vaddr);
+ mp_index_ifd_offset = current_offset;
+ LOGD("mp_index_ifd_offset %d",
+ mp_index_ifd_offset);
+
+ //Traverse to MP Entry value
+ ifd_tag_count = READ_SHORT(mpo_info->output_buff.buf_vaddr, current_offset);
+ LOGD("Tag count in MP entry %d", ifd_tag_count);
+ current_offset += MP_INDEX_COUNT_BYTES;
+
+ /* Get MP Entry Value offset - Count * 12 (Each tag is 12 bytes)*/
+ current_offset += (ifd_tag_count * 12);
+ /*Add Offset to next IFD*/
+ current_offset += MP_INDEX_OFFSET_OF_NEXT_IFD_BYTES;
+
+ mp_entry_val_offset = current_offset;
+ LOGD("MP Entry value offset %d",
+ mp_entry_val_offset);
+
+ //Update image size for primary image
+ current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_ATTRIBUTE_BYTES;
+ if (endianess == MPO_LITTLE_ENDIAN) {
+ mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ mpo_info->primary_image.buf_filled_len, &overflow_flag);
+ } else {
+ mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ mpo_info->primary_image.buf_filled_len, &overflow_flag);
+ }
+
+ aux_start_addr = mpo_info->output_buff.buf_vaddr +
+ mpo_info->primary_image.buf_filled_len;
+
+ for (i = 0; i < mpo_info->num_of_images - 1; i++) {
+ //Go to MP Entry val for each image
+ mp_entry_val_offset += MP_INDEX_ENTRY_VALUE_BYTES;
+ current_offset = mp_entry_val_offset;
+
+ //Update image size
+ current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_ATTRIBUTE_BYTES;
+ if (endianess == MPO_LITTLE_ENDIAN) {
+ mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ mpo_info->aux_images[i].buf_filled_len, &overflow_flag);
+ } else {
+ mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ mpo_info->aux_images[i].buf_filled_len, &overflow_flag);
+ }
+    LOGD("aux_start_addr %x", *aux_start_addr);
+ //Update the offset
+ current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_SIZE_BYTES;
+ if (endianess == MPO_LITTLE_ENDIAN) {
+ mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ aux_start_addr - mp_headr_start_off_addr, &overflow_flag);
+ } else {
+ mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+ current_offset, mpo_info->output_buff_size,
+ aux_start_addr - mp_headr_start_off_addr, &overflow_flag);
+ }
+ aux_start_addr += mpo_info->aux_images[i].buf_filled_len;
+ }
+ if (!overflow_flag) {
+ rc = 0;
+ }
+ return rc;
+}
+
+/** mm_jpeg_mpo_compose
+ *
+ * Arguments:
+ * @mpo_info: MPO Info
+ *
+ * Return:
+ * 0 - Success
+ * -1 - otherwise
+ *
+ * Description:
+ * Compose MPO image from multiple JPEG images
+ *
+ **/
+int mm_jpeg_mpo_compose(mm_jpeg_mpo_info_t *mpo_info)
+{
+ uint8_t *aux_write_offset = NULL;
+ int i = 0, rc = -1;
+
+ pthread_mutex_lock(&g_mpo_lock);
+
+ //Primary image needs to be copied to the o/p buffer if its not already
+ if (mpo_info->output_buff.buf_filled_len == 0) {
+ if (mpo_info->primary_image.buf_filled_len < mpo_info->output_buff_size) {
+ memcpy(mpo_info->output_buff.buf_vaddr, mpo_info->primary_image.buf_vaddr,
+ mpo_info->primary_image.buf_filled_len);
+ mpo_info->output_buff.buf_filled_len +=
+ mpo_info->primary_image.buf_filled_len;
+ } else {
+ LOGE("O/P buffer not large enough. MPO composition failed");
+ pthread_mutex_unlock(&g_mpo_lock);
+ return rc;
+ }
+ }
+ //Append each Aux image to the buffer
+ for (i = 0; i < mpo_info->num_of_images - 1; i++) {
+ if ((mpo_info->output_buff.buf_filled_len +
+ mpo_info->aux_images[i].buf_filled_len) <= mpo_info->output_buff_size) {
+ aux_write_offset = mpo_info->output_buff.buf_vaddr +
+ mpo_info->output_buff.buf_filled_len;
+ memcpy(aux_write_offset, mpo_info->aux_images[i].buf_vaddr,
+ mpo_info->aux_images[i].buf_filled_len);
+ mpo_info->output_buff.buf_filled_len +=
+ mpo_info->aux_images[i].buf_filled_len;
+ } else {
+ LOGE("O/P buffer not large enough. MPO composition failed");
+ pthread_mutex_unlock(&g_mpo_lock);
+ return rc;
+ }
+ }
+
+ rc = mm_jpeg_mpo_update_header(mpo_info);
+ pthread_mutex_unlock(&g_mpo_lock);
+
+ return rc;
+}
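+
+/* Illustrative sketch: composing a two-image MPO with the helper above.
+ * The caller is assumed to have filled primary_image, aux_images[0] and
+ * output_buff with encoded JPEG buffers (buf_vaddr and buf_filled_len set)
+ * and to have sized output_buff_size for both images.
+ */
+int example_compose_two_image_mpo(mm_jpeg_mpo_info_t *p_mpo_info)
+{
+  p_mpo_info->num_of_images = 2;
+  /* a zero filled length makes mm_jpeg_mpo_compose() copy the primary
+   * image into the output buffer before appending the auxiliary image */
+  p_mpo_info->output_buff.buf_filled_len = 0;
+
+  return mm_jpeg_mpo_compose(p_mpo_info);
+}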
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
new file mode 100644
index 0000000..2aeb78f
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
@@ -0,0 +1,186 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue)
+{
+ pthread_mutex_init(&queue->lock, NULL);
+ cam_list_init(&queue->head.list);
+ queue->size = 0;
+ return 0;
+}
+
+int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+ mm_jpeg_q_node_t* node =
+ (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_jpeg_q_node_t");
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_jpeg_q_node_t));
+ node->data = data;
+
+ pthread_mutex_lock(&queue->lock);
+ cam_list_add_tail_node(&node->list, &queue->head.list);
+ queue->size++;
+ pthread_mutex_unlock(&queue->lock);
+
+ return 0;
+
+}
+
+int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_jpeg_q_node_t* node =
+ (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_jpeg_q_node_t");
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_jpeg_q_node_t));
+ node->data = data;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  cam_list_insert_before_node(&node->list, pos);
+ queue->size++;
+ pthread_mutex_unlock(&queue->lock);
+
+ return 0;
+}
+
+mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue)
+{
+ mm_jpeg_q_data_t data;
+ mm_jpeg_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ memset(&data, 0, sizeof(data));
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ if (pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ cam_list_del_node(&node->list);
+ queue->size--;
+ }
+ pthread_mutex_unlock(&queue->lock);
+
+ if (NULL != node) {
+ data = node->data;
+ free(node);
+ }
+
+ return data;
+}
+
+uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue)
+{
+ uint32_t size = 0;
+
+ pthread_mutex_lock(&queue->lock);
+ size = queue->size;
+ pthread_mutex_unlock(&queue->lock);
+
+ return size;
+
+}
+
+int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue)
+{
+ mm_jpeg_queue_flush(queue);
+ pthread_mutex_destroy(&queue->lock);
+ return 0;
+}
+
+int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue)
+{
+ mm_jpeg_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    /* advance before the node is unlinked and freed */
+    pos = pos->next;
+    cam_list_del_node(&node->list);
+    queue->size--;
+
+    /* free the payload pointer (if any) along with the node */
+    if (NULL != node->data.p) {
+      free(node->data.p);
+    }
+    free(node);
+  }
+ queue->size = 0;
+ pthread_mutex_unlock(&queue->lock);
+ return 0;
+}
+
+mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue)
+{
+ mm_jpeg_q_data_t data;
+ mm_jpeg_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ memset(&data, 0, sizeof(data));
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ if (pos != head) {
+ node = member_of(pos, mm_jpeg_q_node_t, list);
+ }
+ pthread_mutex_unlock(&queue->lock);
+
+ if (NULL != node) {
+ data = node->data;
+ }
+ return data;
+}
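+
+/* Illustrative sketch: basic use of the queue helpers above. The data
+ * struct carries a generic payload pointer in .p, which is freed by
+ * mm_jpeg_queue_flush() if it is still queued at deinit time.
+ */
+int example_queue_usage(void)
+{
+  mm_jpeg_queue_t queue;
+  mm_jpeg_q_data_t in, out;
+
+  mm_jpeg_queue_init(&queue);
+
+  memset(&in, 0, sizeof(in));
+  in.p = malloc(16);                /* payload owned by the queue node */
+  if (NULL == in.p) {
+    mm_jpeg_queue_deinit(&queue);
+    return -1;
+  }
+  mm_jpeg_queue_enq(&queue, in);
+
+  out = mm_jpeg_queue_deq(&queue);  /* returns a zeroed struct if empty */
+  if (NULL != out.p) {
+    free(out.p);                    /* dequeued payload is caller-owned */
+  }
+
+  return mm_jpeg_queue_deinit(&queue);
+}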
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
new file mode 100644
index 0000000..b4ee1dc
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
@@ -0,0 +1,1185 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE *pBuffer);
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_EVENTTYPE eEvent,
+ OMX_U32 nData1,
+ OMX_U32 nData2,
+ OMX_PTR pEventData);
+
+
+/** mm_jpegdec_destroy_job
+ *
+ * Arguments:
+ * @p_session: Session obj
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Destroy the job based parameters
+ *
+ **/
+static int32_t mm_jpegdec_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+ int32_t rc = 0;
+
+ return rc;
+}
+
+/** mm_jpegdec_job_done:
+ *
+ * Arguments:
+ * @p_session: decode session
+ *
+ * Return:
+ *       none
+ *
+ * Description:
+ * Finalize the job
+ *
+ **/
+static void mm_jpegdec_job_done(mm_jpeg_job_session_t *p_session)
+{
+ mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+ mm_jpeg_job_q_node_t *node = NULL;
+
+ /*Destroy job related params*/
+ mm_jpegdec_destroy_job(p_session);
+
+ /*remove the job*/
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+ p_session->jobId);
+ if (node) {
+ free(node);
+ }
+ p_session->encoding = OMX_FALSE;
+
+ /* wake up jobMgr thread to work on new job if there is any */
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+
+/** mm_jpegdec_session_send_buffers:
+ *
+ * Arguments:
+ * @data: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Send the buffers to OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_send_buffers(void *data)
+{
+ uint32_t i = 0;
+ mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ QOMX_BUFFER_INFO lbuffer_info;
+ mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+ memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+ for (i = 0; i < p_params->num_src_bufs; i++) {
+ LOGD("Source buffer %d", i);
+ lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+ ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_in_omx_buf[i]), 0,
+ &lbuffer_info, p_params->src_main_buf[i].buf_size,
+ p_params->src_main_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+
+ LOGD("Exit");
+ return ret;
+}
+
+/** mm_jpegdec_session_free_buffers:
+ *
+ * Arguments:
+ * @data: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Free the buffers from OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_free_buffers(void *data)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ uint32_t i = 0;
+ mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+ mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+ for (i = 0; i < p_params->num_src_bufs; i++) {
+ LOGD("Source buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 0, p_session->p_in_omx_buf[i]);
+ if (ret) {
+ LOGE("Error %d", ret);
+ return ret;
+ }
+ }
+
+ for (i = 0; i < p_params->num_dst_bufs; i++) {
+ LOGD("Dest buffer %d", i);
+ ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+ }
+ LOGD("Exit");
+ return ret;
+}
+
+/** mm_jpegdec_session_create:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error types
+ *
+ * Description:
+ *       Create a jpeg decode session
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_create(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+ pthread_mutex_init(&p_session->lock, NULL);
+ pthread_cond_init(&p_session->cond, NULL);
+ cirq_reset(&p_session->cb_q);
+ p_session->state_change_pending = OMX_FALSE;
+ p_session->abort_state = MM_JPEG_ABORT_NONE;
+ p_session->error_flag = OMX_ErrorNone;
+ p_session->ebd_count = 0;
+ p_session->fbd_count = 0;
+ p_session->encode_pid = -1;
+ p_session->config = OMX_FALSE;
+
+ p_session->omx_callbacks.EmptyBufferDone = mm_jpegdec_ebd;
+ p_session->omx_callbacks.FillBufferDone = mm_jpegdec_fbd;
+ p_session->omx_callbacks.EventHandler = mm_jpegdec_event_handler;
+ p_session->exif_count_local = 0;
+
+ rc = OMX_GetHandle(&p_session->omx_handle,
+ "OMX.qcom.image.jpeg.decoder",
+ (void *)p_session,
+ &p_session->omx_callbacks);
+
+ if (OMX_ErrorNone != rc) {
+ LOGE("OMX_GetHandle failed (%d)", rc);
+ return rc;
+ }
+ return rc;
+}
+
+/** mm_jpegdec_session_destroy:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * none
+ *
+ * Description:
+ *       Destroy a jpeg decode session
+ *
+ **/
+void mm_jpegdec_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+ LOGD("E");
+ if (NULL == p_session->omx_handle) {
+ LOGE("invalid handle");
+ return;
+ }
+
+ rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+ if (rc) {
+ LOGE("Error");
+ }
+
+ rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+ mm_jpegdec_session_free_buffers);
+ if (rc) {
+ LOGE("Error");
+ }
+
+ rc = OMX_FreeHandle(p_session->omx_handle);
+ if (0 != rc) {
+ LOGE("OMX_FreeHandle failed (%d)", rc);
+ }
+ p_session->omx_handle = NULL;
+
+
+ pthread_mutex_destroy(&p_session->lock);
+ pthread_cond_destroy(&p_session->cond);
+ LOGD("X");
+}
+
+/** mm_jpegdec_session_config_ports:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+ mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+
+ mm_jpeg_buf_t *p_src_buf =
+ &p_params->src_main_buf[p_jobparams->src_index];
+
+ p_session->inputPort.nPortIndex = 0;
+ p_session->outputPort.nPortIndex = 1;
+
+
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->outputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ p_session->inputPort.format.image.nFrameWidth =
+ (OMX_U32)p_jobparams->main_dim.src_dim.width;
+ p_session->inputPort.format.image.nFrameHeight =
+ (OMX_U32)p_jobparams->main_dim.src_dim.height;
+ p_session->inputPort.format.image.nStride =
+ p_src_buf->offset.mp[0].stride;
+ p_session->inputPort.format.image.nSliceHeight =
+ (OMX_U32)p_src_buf->offset.mp[0].scanline;
+ p_session->inputPort.format.image.eColorFormat =
+ map_jpeg_format(p_params->color_format);
+ p_session->inputPort.nBufferSize =
+ p_params->src_main_buf[p_jobparams->src_index].buf_size;
+ p_session->inputPort.nBufferCountActual = (OMX_U32)p_params->num_src_bufs;
+ ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->inputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ return ret;
+}
+
+
+/** mm_jpegdec_session_config_main:
+ *
+ * Arguments:
+ * @p_session: job session
+ *
+ * Return:
+ * OMX error values
+ *
+ * Description:
+ * Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+ /* config port */
+ LOGD("config port");
+ rc = mm_jpegdec_session_config_ports(p_session);
+ if (OMX_ErrorNone != rc) {
+ LOGE("config port failed");
+ return rc;
+ }
+
+
+ /* TODO: config crop */
+
+ return rc;
+}
+
+/** mm_jpegdec_session_configure:
+ *
+ *  Arguments:
+ *    @p_session: decode session
+ *
+ * Return:
+ * none
+ *
+ * Description:
+ * Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpegdec_session_configure(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+ LOGD("E ");
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+ /* config main img */
+ ret = mm_jpegdec_session_config_main(p_session);
+ if (OMX_ErrorNone != ret) {
+ LOGE("config main img failed");
+ goto error;
+ }
+
+ /* TODO: common config (if needed) */
+
+ ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+ mm_jpegdec_session_send_buffers);
+ if (ret) {
+ LOGE("change state to idle failed %d", ret);
+ goto error;
+ }
+
+ ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+ NULL);
+ if (ret) {
+ LOGE("change state to executing failed %d", ret);
+ goto error;
+ }
+
+error:
+ LOGD("X ret %d", ret);
+ return ret;
+}
+
+static OMX_ERRORTYPE mm_jpeg_session_port_enable(
+ mm_jpeg_job_session_t *p_session,
+ OMX_U32 nPortIndex,
+ OMX_BOOL wait)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMX_EVENTTYPE lEvent;
+
+ pthread_mutex_lock(&p_session->lock);
+ p_session->event_pending = OMX_TRUE;
+ pthread_mutex_unlock(&p_session->lock);
+
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+ nPortIndex, NULL);
+
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ if (wait == OMX_TRUE) {
+ // Wait for cmd complete
+ pthread_mutex_lock(&p_session->lock);
+ if (p_session->event_pending == OMX_TRUE) {
+ LOGD("before wait");
+ pthread_cond_wait(&p_session->cond, &p_session->lock);
+ lEvent = p_session->omxEvent;
+ LOGD("after wait");
+ }
+ lEvent = p_session->omxEvent;
+ pthread_mutex_unlock(&p_session->lock);
+
+ if (lEvent != OMX_EventCmdComplete) {
+ LOGD("Unexpected event %d",lEvent);
+ return OMX_ErrorUndefined;
+ }
+ }
+ return OMX_ErrorNone;
+}
+
+static OMX_ERRORTYPE mm_jpeg_session_port_disable(
+ mm_jpeg_job_session_t *p_session,
+ OMX_U32 nPortIndex,
+ OMX_BOOL wait)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ OMX_EVENTTYPE lEvent;
+
+ pthread_mutex_lock(&p_session->lock);
+ p_session->event_pending = OMX_TRUE;
+ pthread_mutex_unlock(&p_session->lock);
+
+ ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+ nPortIndex, NULL);
+
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+ if (wait == OMX_TRUE) {
+ // Wait for cmd complete
+ pthread_mutex_lock(&p_session->lock);
+ if (p_session->event_pending == OMX_TRUE) {
+ LOGD("before wait");
+ pthread_cond_wait(&p_session->cond, &p_session->lock);
+
+ LOGD("after wait");
+ }
+ lEvent = p_session->omxEvent;
+ pthread_mutex_unlock(&p_session->lock);
+
+ if (lEvent != OMX_EventCmdComplete) {
+ LOGD("Unexpected event %d",lEvent);
+ return OMX_ErrorUndefined;
+ }
+ }
+ return OMX_ErrorNone;
+}
+
+
+/** mm_jpegdec_session_decode:
+ *
+ * Arguments:
+ *    @p_session: decode session
+ *
+ * Return:
+ * OMX_ERRORTYPE
+ *
+ * Description:
+ *       Start the decoding
+ *
+ **/
+static OMX_ERRORTYPE mm_jpegdec_session_decode(mm_jpeg_job_session_t *p_session)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+ mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+ OMX_EVENTTYPE lEvent;
+ uint32_t i;
+ QOMX_BUFFER_INFO lbuffer_info;
+
+ pthread_mutex_lock(&p_session->lock);
+ p_session->abort_state = MM_JPEG_ABORT_NONE;
+ p_session->encoding = OMX_FALSE;
+ pthread_mutex_unlock(&p_session->lock);
+
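+  /* the OMX component is configured only once per session; subsequent
+   * decode jobs on the same session reuse the existing configuration */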
+ if (OMX_FALSE == p_session->config) {
+ ret = mm_jpegdec_session_configure(p_session);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+ p_session->config = OMX_TRUE;
+ }
+
+ pthread_mutex_lock(&p_session->lock);
+ p_session->encoding = OMX_TRUE;
+ pthread_mutex_unlock(&p_session->lock);
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
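+  /* decode flow: submit the bitstream buffer on the input port, wait for
+   * OMX_EventPortSettingsChanged on the output port, reconfigure and
+   * re-enable the output port for the destination dimensions, then queue
+   * the destination buffer with OMX_FillThisBuffer */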
+ p_session->event_pending = OMX_TRUE;
+
+ ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+ p_session->p_in_omx_buf[p_jobparams->src_index]);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+
+ // Wait for port settings changed
+ pthread_mutex_lock(&p_session->lock);
+ if (p_session->event_pending == OMX_TRUE) {
+ LOGD("before wait");
+ pthread_cond_wait(&p_session->cond, &p_session->lock);
+ }
+ lEvent = p_session->omxEvent;
+ LOGD("after wait");
+ pthread_mutex_unlock(&p_session->lock);
+
+ if (lEvent != OMX_EventPortSettingsChanged) {
+ LOGD("Unexpected event %d",lEvent);
+ goto error;
+ }
+
+ // Disable output port (wait)
+ mm_jpeg_session_port_disable(p_session,
+ p_session->outputPort.nPortIndex,
+ OMX_TRUE);
+
+ // Get port definition
+ ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->outputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ // Set port definition
+ p_session->outputPort.format.image.nFrameWidth =
+ (OMX_U32)p_jobparams->main_dim.dst_dim.width;
+ p_session->outputPort.format.image.nFrameHeight =
+ (OMX_U32)p_jobparams->main_dim.dst_dim.height;
+ p_session->outputPort.format.image.eColorFormat =
+ map_jpeg_format(p_params->color_format);
+
+ p_session->outputPort.nBufferSize =
+ p_params->dest_buf[p_jobparams->dst_index].buf_size;
+ p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+
+ p_session->outputPort.format.image.nSliceHeight =
+ (OMX_U32)
+ p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].scanline;
+ p_session->outputPort.format.image.nStride =
+ p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].stride;
+
+ ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+ &p_session->outputPort);
+ if (ret) {
+ LOGE("failed");
+ return ret;
+ }
+
+ // Enable port (no wait)
+ mm_jpeg_session_port_enable(p_session,
+ p_session->outputPort.nPortIndex,
+ OMX_FALSE);
+
+ memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+ // Use buffers
+ for (i = 0; i < p_params->num_dst_bufs; i++) {
+ lbuffer_info.fd = (OMX_U32)p_params->dest_buf[i].fd;
+ LOGD("Dest buffer %d", (unsigned int)i);
+ ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+ 1, &lbuffer_info, p_params->dest_buf[i].buf_size,
+ p_params->dest_buf[i].buf_vaddr);
+ if (ret) {
+ LOGE("Error");
+ return ret;
+ }
+ }
+
+ // Wait for port enable completion
+ pthread_mutex_lock(&p_session->lock);
+  if (p_session->event_pending == OMX_TRUE) {
+    LOGD("before wait");
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    LOGD("after wait");
+  }
+  lEvent = p_session->omxEvent;
+ pthread_mutex_unlock(&p_session->lock);
+
+ if (lEvent != OMX_EventCmdComplete) {
+ LOGD("Unexpected event %d",lEvent);
+ goto error;
+ }
+
+ ret = OMX_FillThisBuffer(p_session->omx_handle,
+ p_session->p_out_omx_buf[p_jobparams->dst_index]);
+ if (ret) {
+ LOGE("Error");
+ goto error;
+ }
+
+ MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+ LOGD("X ");
+ return ret;
+}
+
+/** mm_jpegdec_process_decoding_job:
+ *
+ * Arguments:
+ * @my_obj: jpeg client
+ * @job_node: job node
+ *
+ * Return:
+ *       0 for success, -1 otherwise
+ *
+ * Description:
+ *       Start the decoding job
+ *
+ **/
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = 0;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_job_session_t *p_session = NULL;
+
+ /* check if valid session */
+ p_session = mm_jpeg_get_session(my_obj, job_node->dec_info.job_id);
+ if (NULL == p_session) {
+ LOGE("invalid job id %x",
+ job_node->dec_info.job_id);
+ return -1;
+ }
+
+  /* send decode cmd to OMX, queue job into ongoing queue */
+  qdata.p = job_node;
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+  if (rc) {
+    LOGE("jpeg enqueue failed %d", rc);
+ goto error;
+ }
+
+ p_session->decode_job = job_node->dec_info.decode_job;
+ p_session->jobId = job_node->dec_info.job_id;
+ ret = mm_jpegdec_session_decode(p_session);
+ if (ret) {
+ LOGE("encode session failed");
+ goto error;
+ }
+
+ LOGD("Success X ");
+ return rc;
+
+error:
+
+ if ((OMX_ErrorNone != ret) &&
+ (NULL != p_session->dec_params.jpeg_cb)) {
+ p_session->job_status = JPEG_JOB_STATUS_ERROR;
+ LOGD("send jpeg error callback %d",
+ p_session->job_status);
+ p_session->dec_params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ NULL,
+ p_session->dec_params.userdata);
+ }
+
+ /*remove the job*/
+ mm_jpegdec_job_done(p_session);
+ LOGD("Error X ");
+
+ return rc;
+}
+
+/** mm_jpegdec_start_decode_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job: pointer to decode job
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the decoding job
+ *
+ **/
+int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_t *job,
+ uint32_t *job_id)
+{
+ mm_jpeg_q_data_t qdata;
+ int32_t rc = -1;
+ uint8_t session_idx = 0;
+ uint8_t client_idx = 0;
+ mm_jpeg_job_q_node_t* node = NULL;
+ mm_jpeg_job_session_t *p_session = NULL;
+ mm_jpeg_decode_job_t *p_jobparams = &job->decode_job;
+
+ *job_id = 0;
+
+ /* check if valid session */
+ session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+ client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+ LOGD("session_idx %d client idx %d",
+ session_idx, client_idx);
+
+ if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+ (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+ LOGE("invalid session id %x",
+ job->decode_job.session_id);
+ return rc;
+ }
+
+ p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+ if (OMX_FALSE == p_session->active) {
+ LOGE("session not active %x",
+ job->decode_job.session_id);
+ return rc;
+ }
+
+ if ((p_jobparams->src_index >= (int32_t)p_session->dec_params.num_src_bufs) ||
+ (p_jobparams->dst_index >= (int32_t)p_session->dec_params.num_dst_bufs)) {
+ LOGE("invalid buffer indices");
+ return rc;
+ }
+
+ /* enqueue new job into todo job queue */
+ node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+ if (NULL == node) {
+ LOGE("No memory for mm_jpeg_job_q_node_t");
+ return -1;
+ }
+
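+  /* compose the job id from the session id plus a rolling per-session
+   * job counter placed in bits 16 and above */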
+ *job_id = job->decode_job.session_id |
+ ((p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+ memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+ node->dec_info.decode_job = job->decode_job;
+ node->dec_info.job_id = *job_id;
+ node->dec_info.client_handle = p_session->client_hdl;
+ node->type = MM_JPEG_CMD_TYPE_DECODE_JOB;
+
+ qdata.p = node;
+ rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+ if (0 == rc) {
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+ }
+
+ return rc;
+}
+
+/** mm_jpegdec_create_session:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @client_hdl: client handle
+ *    @p_params: pointer to decode params
+ * @p_session_id: session id
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Create the decoding session
+ *
+ **/
+int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ mm_jpeg_decode_params_t *p_params,
+ uint32_t* p_session_id)
+{
+ int32_t rc = 0;
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ uint8_t clnt_idx = 0;
+ int session_idx = -1;
+ mm_jpeg_job_session_t *p_session = NULL;
+ *p_session_id = 0;
+
+ /* validate the parameters */
+ if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+ || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+ LOGE("invalid num buffers");
+ return rc;
+ }
+
+ /* check if valid client */
+ clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+ if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+ LOGE("invalid client with handler (%d)", client_hdl);
+ return rc;
+ }
+
+ session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+ if (session_idx < 0) {
+ LOGE("invalid session id (%d)", session_idx);
+ return rc;
+ }
+
+ ret = mm_jpegdec_session_create(p_session);
+ if (OMX_ErrorNone != ret) {
+ p_session->active = OMX_FALSE;
+ LOGE("jpeg session create failed");
+ return rc;
+ }
+
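+  /* session id layout: magic value in bits 24 and above, session index in
+   * bits 8-15, client index in bits 0-7 */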
+ *p_session_id = (JOB_ID_MAGICVAL << 24) |
+ ((unsigned)session_idx << 8) | clnt_idx;
+
+ /*copy the params*/
+ p_session->dec_params = *p_params;
+ p_session->client_hdl = client_hdl;
+ p_session->sessionId = *p_session_id;
+ p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+ LOGD("session id %x", *p_session_id);
+
+ return rc;
+}
+
+/** mm_jpegdec_destroy_session:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *    @p_session: decode session
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Destroy the decoding session
+ *
+ **/
+int32_t mm_jpegdec_destroy_session(mm_jpeg_obj *my_obj,
+ mm_jpeg_job_session_t *p_session)
+{
+ int32_t rc = 0;
+ mm_jpeg_job_q_node_t *node = NULL;
+
+ if (NULL == p_session) {
+ LOGE("invalid session");
+ return rc;
+ }
+ uint32_t session_id = p_session->sessionId;
+ pthread_mutex_lock(&my_obj->job_lock);
+
+ /* abort job if in todo queue */
+ LOGD("abort todo jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+ }
+
+ /* abort job if in ongoing queue */
+ LOGD("abort ongoing jobs");
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ while (NULL != node) {
+ free(node);
+ node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+ }
+
+ /* abort the current session */
+ mm_jpeg_session_abort(p_session);
+ mm_jpegdec_session_destroy(p_session);
+ mm_jpeg_remove_session_idx(my_obj, session_id);
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ /* wake up jobMgr thread to work on new job if there is any */
+ cam_sem_post(&my_obj->job_mgr.job_sem);
+ LOGD("X");
+
+ return rc;
+}
+
+/** mm_jpegdec_destroy_session_by_id:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ * @session_id: session index
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Destroy the decoding session looked up by session id
+ *
+ **/
+int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+ int32_t rc = 0;
+ mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+ if (NULL == p_session) {
+ LOGE("session is not valid");
+ return rc;
+ }
+
+ return mm_jpegdec_destroy_session(my_obj, p_session);
+}
+
+
+
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE *pBuffer)
+{
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+ LOGD("count %d ", p_session->ebd_count);
+ pthread_mutex_lock(&p_session->lock);
+ p_session->ebd_count++;
+ pthread_mutex_unlock(&p_session->lock);
+ return 0;
+}
+
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE *pBuffer)
+{
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+ mm_jpeg_output_t output_buf;
+
+ LOGD("count %d ", p_session->fbd_count);
+
+ pthread_mutex_lock(&p_session->lock);
+
+ if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+ pthread_mutex_unlock(&p_session->lock);
+ return ret;
+ }
+
+ p_session->fbd_count++;
+ if (NULL != p_session->dec_params.jpeg_cb) {
+ p_session->job_status = JPEG_JOB_STATUS_DONE;
+ output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+ output_buf.buf_vaddr = pBuffer->pBuffer;
+ output_buf.fd = -1;
+ LOGD("send jpeg callback %d",
+ p_session->job_status);
+ p_session->dec_params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ &output_buf,
+ p_session->dec_params.userdata);
+
+ /* remove from ready queue */
+ mm_jpegdec_job_done(p_session);
+ }
+ pthread_mutex_unlock(&p_session->lock);
+ LOGD("Exit");
+
+ return ret;
+}
+
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_EVENTTYPE eEvent,
+ OMX_U32 nData1,
+ OMX_U32 nData2,
+ OMX_PTR pEventData)
+{
+ mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+ LOGD("%d %d %d state %d", eEvent, (int)nData1,
+ (int)nData2, p_session->abort_state);
+
+ LOGD("AppData=%p ", pAppData);
+
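+  /* record the latest OMX event; an in-progress abort is acknowledged by
+   * waking the aborting thread, OMX errors trigger the client error
+   * callback and, if a job is in flight, complete it, while command-complete
+   * and port-settings-changed events clear the pending flags and wake any
+   * waiter */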
+ pthread_mutex_lock(&p_session->lock);
+ p_session->omxEvent = eEvent;
+ if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+ p_session->abort_state = MM_JPEG_ABORT_DONE;
+ pthread_cond_signal(&p_session->cond);
+ pthread_mutex_unlock(&p_session->lock);
+ return OMX_ErrorNone;
+ }
+
+ if (eEvent == OMX_EventError) {
+ if (p_session->encoding == OMX_TRUE) {
+ LOGD("Error during encoding");
+
+ /* send jpeg callback */
+ if (NULL != p_session->dec_params.jpeg_cb) {
+ p_session->job_status = JPEG_JOB_STATUS_ERROR;
+ LOGD("send jpeg error callback %d",
+ p_session->job_status);
+ p_session->dec_params.jpeg_cb(p_session->job_status,
+ p_session->client_hdl,
+ p_session->jobId,
+ NULL,
+ p_session->dec_params.userdata);
+ }
+
+ /* remove from ready queue */
+ mm_jpegdec_job_done(p_session);
+ }
+ pthread_cond_signal(&p_session->cond);
+ } else if (eEvent == OMX_EventCmdComplete) {
+ p_session->state_change_pending = OMX_FALSE;
+ p_session->event_pending = OMX_FALSE;
+ pthread_cond_signal(&p_session->cond);
+ } else if (eEvent == OMX_EventPortSettingsChanged) {
+ p_session->event_pending = OMX_FALSE;
+ pthread_cond_signal(&p_session->cond);
+ }
+
+ pthread_mutex_unlock(&p_session->lock);
+ LOGD("Exit");
+ return OMX_ErrorNone;
+}
+
+/** mm_jpegdec_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort the decoding job
+ *
+ **/
+int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+ uint32_t jobId)
+{
+ int32_t rc = -1;
+ mm_jpeg_job_q_node_t *node = NULL;
+ mm_jpeg_job_session_t *p_session = NULL;
+
+ LOGD("Enter");
+ pthread_mutex_lock(&my_obj->job_lock);
+
+ /* abort job if in todo queue */
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    rc = 0;
+    goto abort_done;
+  }
+
+ /* abort job if in ongoing queue */
+ node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+ if (NULL != node) {
+ /* find job that is OMX ongoing, ask OMX to abort the job */
+ p_session = mm_jpeg_get_session(my_obj, node->dec_info.job_id);
+ if (p_session) {
+ mm_jpeg_session_abort(p_session);
+ } else {
+ LOGE("Invalid job id 0x%x",
+ node->dec_info.job_id);
+ }
+    free(node);
+    rc = 0;
+    goto abort_done;
+ }
+
+abort_done:
+ pthread_mutex_unlock(&my_obj->job_lock);
+
+ return rc;
+}
+/** mm_jpegdec_init:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Initializes the jpeg decoder object
+ *
+ **/
+int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj)
+{
+ int32_t rc = 0;
+
+ /* init locks */
+ pthread_mutex_init(&my_obj->job_lock, NULL);
+
+ /* init ongoing job queue */
+ rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+ if (0 != rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ /* init job semaphore and launch jobmgr thread */
+ LOGD("Launch jobmgr thread rc %d", rc);
+ rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+ if (0 != rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    LOGE("OMX_Init failed");
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    rc = -1;
+  }
+
+ return rc;
+}
+
+/** mm_jpegdec_deinit:
+ *
+ * Arguments:
+ * @my_obj: jpeg object
+ *
+ * Return:
+ * 0 for success else failure
+ *
+ * Description:
+ *       Deinits the jpeg decoder object
+ *
+ **/
+int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj)
+{
+ int32_t rc = 0;
+
+ /* release jobmgr thread */
+ rc = mm_jpeg_jobmgr_thread_release(my_obj);
+ if (0 != rc) {
+ LOGE("Error");
+ }
+
+ /* unload OMX engine */
+ OMX_Deinit();
+
+ /* deinit ongoing job and cb queue */
+ rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+ if (0 != rc) {
+ LOGE("Error");
+ }
+
+ /* destroy locks */
+ pthread_mutex_destroy(&my_obj->job_lock);
+
+ return rc;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
new file mode 100644
index 0000000..df6656b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
@@ -0,0 +1,301 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
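+/* The decoder interface keeps one process-wide mm_jpeg_obj, guarded by
+ * g_dec_intf_lock: it is created on the first jpegdec_open() and torn down
+ * when the last client closes. */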
+static pthread_mutex_t g_dec_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_jpeg_obj* g_jpegdec_obj = NULL;
+
+/** mm_jpegdec_intf_start_job:
+ *
+ *  Arguments:
+ *    @job: jpeg decode job object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Start the jpeg decode job
+ *
+ **/
+static int32_t mm_jpegdec_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+ int32_t rc = -1;
+
+ if (NULL == job ||
+ NULL == job_id) {
+ LOGE("invalid parameters for job or jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ if (NULL == g_jpegdec_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+ }
+ rc = mm_jpegdec_start_decode_job(g_jpegdec_obj, job, job_id);
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+}
+
+/** mm_jpegdec_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: decode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create a new jpeg decode session
+ *
+ **/
+static int32_t mm_jpegdec_intf_create_session(uint32_t client_hdl,
+ mm_jpeg_decode_params_t *p_params,
+ uint32_t *p_session_id)
+{
+ int32_t rc = -1;
+
+ if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+ LOGE("invalid client_hdl or jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ if (NULL == g_jpegdec_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpegdec_create_session(g_jpegdec_obj, client_hdl, p_params, p_session_id);
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+}
+
+/** mm_jpegdec_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy the jpeg decode session
+ *
+ **/
+static int32_t mm_jpegdec_intf_destroy_session(uint32_t session_id)
+{
+ int32_t rc = -1;
+
+ if (0 == session_id) {
+ LOGE("invalid client_hdl or jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ if (NULL == g_jpegdec_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpegdec_destroy_session_by_id(g_jpegdec_obj, session_id);
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+}
+
+/** mm_jpegdec_intf_abort_job:
+ *
+ * Arguments:
+ * @jobId: job id
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpegdec_intf_abort_job(uint32_t job_id)
+{
+ int32_t rc = -1;
+
+ if (0 == job_id) {
+ LOGE("invalid jobId");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ if (NULL == g_jpegdec_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+ }
+
+ rc = mm_jpegdec_abort_job(g_jpegdec_obj, job_id);
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+}
+
+/** mm_jpegdec_intf_close:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Close the jpeg decoder client
+ *
+ **/
+static int32_t mm_jpegdec_intf_close(uint32_t client_hdl)
+{
+ int32_t rc = -1;
+
+ if (0 == client_hdl) {
+ LOGE("invalid client_hdl");
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ if (NULL == g_jpegdec_obj) {
+ /* mm_jpeg obj not exists, return error */
+ LOGE("mm_jpeg is not opened yet");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+ }
+
+  rc = mm_jpeg_close(g_jpegdec_obj, client_hdl);
+  if (0 == rc) {
+    g_jpegdec_obj->num_clients--;
+    if (0 == g_jpegdec_obj->num_clients) {
+      /* No client left, close jpeg decoder internally */
+      rc = mm_jpegdec_deinit(g_jpegdec_obj);
+      free(g_jpegdec_obj);
+      g_jpegdec_obj = NULL;
+    }
+  }
+
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return rc;
+}
+
+
+
+/** jpegdec_open:
+ *
+ * Arguments:
+ * @ops: ops table pointer
+ *
+ * Return:
+ *       0 on failure, valid client handle otherwise
+ *
+ * Description:
+ * Open a jpeg client
+ *
+ **/
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops)
+{
+ int32_t rc = 0;
+ uint32_t clnt_hdl = 0;
+ mm_jpeg_obj* jpeg_obj = NULL;
+
+ pthread_mutex_lock(&g_dec_intf_lock);
+ /* first time open */
+ if(NULL == g_jpegdec_obj) {
+ jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+ if(NULL == jpeg_obj) {
+ LOGE("no mem");
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* initialize jpeg obj */
+ memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+ rc = mm_jpegdec_init(jpeg_obj);
+ if(0 != rc) {
+ LOGE("mm_jpeg_init err = %d", rc);
+ free(jpeg_obj);
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* remember in global variable */
+ g_jpegdec_obj = jpeg_obj;
+ }
+
+ /* open new client */
+ clnt_hdl = mm_jpeg_new_client(g_jpegdec_obj);
+ if (clnt_hdl > 0) {
+ /* valid client */
+ if (NULL != ops) {
+ /* fill in ops tbl if ptr not NULL */
+ ops->start_job = mm_jpegdec_intf_start_job;
+ ops->abort_job = mm_jpegdec_intf_abort_job;
+ ops->create_session = mm_jpegdec_intf_create_session;
+ ops->destroy_session = mm_jpegdec_intf_destroy_session;
+ ops->close = mm_jpegdec_intf_close;
+ }
+ } else {
+ /* failed new client */
+ LOGE("mm_jpeg_new_client failed");
+
+ if (0 == g_jpegdec_obj->num_clients) {
+ /* no client, close jpeg */
+ mm_jpegdec_deinit(g_jpegdec_obj);
+ free(g_jpegdec_obj);
+ g_jpegdec_obj = NULL;
+ }
+ }
+
+ pthread_mutex_unlock(&g_dec_intf_lock);
+ return clnt_hdl;
+}
+
+
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk b/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk
new file mode 100644
index 0000000..b42636c
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk
@@ -0,0 +1,87 @@
+# Encoder interface test
+OLD_LOCAL_PATH := $(LOCAL_PATH)
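+# LOCAL_PATH is saved here and restored from OLD_LOCAL_PATH at the end of
+# this makefile so the two test modules below do not leak path changes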
+MM_JPEG_TEST_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../common.mk
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_JPEG_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+OMX_CORE_DIR := hardware/qcom/camera/mm-image-codec
+
+LOCAL_C_INCLUDES := $(MM_JPEG_TEST_PATH)
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../inc
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../common
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../mm-camera-interface/inc
+LOCAL_C_INCLUDES += $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qexif
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_jpeg_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE := mm-jpeg-interface-test
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmjpeg_interface
+
+include $(BUILD_EXECUTABLE)
+
+
+
+# Decoder interface test
+
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_JPEG_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+OMX_CORE_DIR := hardware/qcom/camera/mm-image-codec
+
+LOCAL_C_INCLUDES := $(MM_JPEG_TEST_PATH)
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../inc
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../common
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../mm-camera-interface/inc
+LOCAL_C_INCLUDES += $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qexif
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_jpegdec_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE := mm-jpegdec-interface-test
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmjpeg_interface
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH) \ No newline at end of file
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
new file mode 100644
index 0000000..b1ddafc
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
@@ -0,0 +1,776 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <stdlib.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+
+#define MAX_NUM_BUFS (12)
+#define MAX_NUM_CLIENT (8)
+
+/** DUMP_TO_FILE:
+ * @filename: file name
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ fwrite(p_addr, 1, len, fp); \
+ fclose(fp); \
+ } else { \
+ LOGE("cannot dump image"); \
+ } \
+})
+
+static uint32_t g_count = 1U, g_i;
+
+typedef struct {
+ mm_jpeg_color_format fmt;
+ cam_rational_type_t mult;
+ const char *str;
+} mm_jpeg_intf_test_colfmt_t;
+
+typedef struct {
+ char *filename;
+ int width;
+ int height;
+ char *out_filename;
+ uint32_t burst_mode;
+ uint32_t min_out_bufs;
+ mm_jpeg_intf_test_colfmt_t col_fmt;
+ uint32_t encode_thumbnail;
+ int tmb_width;
+ int tmb_height;
+ int main_quality;
+ int thumb_quality;
+ char *qtable_luma_file;
+ char *qtable_chroma_file;
+ int client_cnt;
+} jpeg_test_input_t;
+
+/* Static constants */
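+/* The tables below default to the example quantization tables from the JPEG
+ * specification; the -J/-K options overwrite them from file at runtime. */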
+/* default Luma Qtable */
+uint8_t DEFAULT_QTABLE_0[QUANT_SIZE] = {
+ 16, 11, 10, 16, 24, 40, 51, 61,
+ 12, 12, 14, 19, 26, 58, 60, 55,
+ 14, 13, 16, 24, 40, 57, 69, 56,
+ 14, 17, 22, 29, 51, 87, 80, 62,
+ 18, 22, 37, 56, 68, 109, 103, 77,
+ 24, 35, 55, 64, 81, 104, 113, 92,
+ 49, 64, 78, 87, 103, 121, 120, 101,
+ 72, 92, 95, 98, 112, 100, 103, 99
+};
+
+/* default Chroma Qtable */
+uint8_t DEFAULT_QTABLE_1[QUANT_SIZE] = {
+ 17, 18, 24, 47, 99, 99, 99, 99,
+ 18, 21, 26, 66, 99, 99, 99, 99,
+ 24, 26, 56, 99, 99, 99, 99, 99,
+ 47, 66, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99
+};
+
+typedef struct {
+ char *filename[MAX_NUM_BUFS];
+ int width;
+ int height;
+ char *out_filename[MAX_NUM_BUFS];
+ pthread_mutex_t lock;
+ pthread_cond_t cond;
+ pthread_t thread_id;
+ buffer_t input[MAX_NUM_BUFS];
+ buffer_t output[MAX_NUM_BUFS];
+ int use_ion;
+ uint32_t handle;
+ mm_jpeg_ops_t ops;
+ uint32_t job_id[MAX_NUM_BUFS];
+ mm_jpeg_encode_params_t params;
+ mm_jpeg_job_t job;
+ uint32_t session_id;
+ uint32_t num_bufs;
+ uint32_t min_out_bufs;
+ size_t buf_filled_len[MAX_NUM_BUFS];
+ mm_dimension pic_size;
+ int ret;
+  int client_id;
+} mm_jpeg_intf_test_t;
+
+
+
+static const mm_jpeg_intf_test_colfmt_t color_formats[] =
+{
+ { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2, {3, 2}, "YCRCBLP_H2V2" },
+ { MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2, {3, 2}, "YCBCRLP_H2V2" },
+ { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1, {2, 1}, "YCRCBLP_H2V1" },
+ { MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1, {2, 1}, "YCBCRLP_H2V1" },
+ { MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2, {2, 1}, "YCRCBLP_H1V2" },
+ { MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2, {2, 1}, "YCBCRLP_H1V2" },
+ { MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1, {3, 1}, "YCRCBLP_H1V1" },
+ { MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1, {3, 1}, "YCBCRLP_H1V1" }
+};
+
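+/* Built-in default test configuration; values parsed from the command line
+ * in mm_jpeg_test_get_input() override these. */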
+static jpeg_test_input_t jpeg_input[] = {
+ { QCAMERA_DUMP_FRM_LOCATION"test_1.yuv", 4000, 3008, QCAMERA_DUMP_FRM_LOCATION"test_1.jpg", 0, 0,
+ { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2, {3, 2}, "YCRCBLP_H2V2" },
+ 0, 320, 240, 80, 80, NULL, NULL, 1}
+};
+
+static void mm_jpeg_encode_callback(jpeg_job_status_t status,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_output,
+ void *userData)
+{
+ mm_jpeg_intf_test_t *p_obj = (mm_jpeg_intf_test_t *)userData;
+
+ pthread_mutex_lock(&p_obj->lock);
+
+ if (status == JPEG_JOB_STATUS_ERROR) {
+ LOGE("Encode error");
+ } else {
+ int i = 0;
+ for (i = 0; p_obj->job_id[i] && (jobId != p_obj->job_id[i]); i++)
+ ;
+ if (!p_obj->job_id[i]) {
+ LOGE("Cannot find job ID!!!");
+ goto error;
+ }
+ LOGE("Encode success addr %p len %zu idx %d",
+ p_output->buf_vaddr, p_output->buf_filled_len, i);
+
+ p_obj->buf_filled_len[i] = p_output->buf_filled_len;
+ if (p_obj->min_out_bufs) {
+ LOGE("Saving file%s addr %p len %zu",
+ p_obj->out_filename[i],
+ p_output->buf_vaddr, p_output->buf_filled_len);
+ DUMP_TO_FILE(p_obj->out_filename[i], p_output->buf_vaddr,
+ p_output->buf_filled_len);
+ }
+ }
+ g_i++;
+
+error:
+
+ if (g_i >= g_count) {
+ LOGE("Signal the thread");
+ pthread_cond_signal(&p_obj->cond);
+ }
+ pthread_mutex_unlock(&p_obj->lock);
+}
+
+int mm_jpeg_test_alloc(buffer_t *p_buffer, int use_pmem)
+{
+ int ret = 0;
+ /*Allocate buffers*/
+ if (use_pmem) {
+ p_buffer->addr = (uint8_t *)buffer_allocate(p_buffer, 0);
+ if (NULL == p_buffer->addr) {
+ LOGE("Error");
+ return -1;
+ }
+ } else {
+ /* Allocate heap memory */
+ p_buffer->addr = (uint8_t *)malloc(p_buffer->size);
+ if (NULL == p_buffer->addr) {
+ LOGE("Error");
+ return -1;
+ }
+ }
+ return ret;
+}
+
+void mm_jpeg_test_free(buffer_t *p_buffer)
+{
+ if (p_buffer->addr == NULL)
+ return;
+
+ if (p_buffer->p_pmem_fd >= 0)
+ buffer_deallocate(p_buffer);
+ else
+ free(p_buffer->addr);
+
+ memset(p_buffer, 0x0, sizeof(buffer_t));
+}
+
+int mm_jpeg_test_read(mm_jpeg_intf_test_t *p_obj, uint32_t idx)
+{
+ FILE *fp = NULL;
+ size_t file_size = 0;
+ fp = fopen(p_obj->filename[idx], "rb");
+ if (!fp) {
+ LOGE("error");
+ return -1;
+ }
+ fseek(fp, 0, SEEK_END);
+ file_size = (size_t)ftell(fp);
+ fseek(fp, 0, SEEK_SET);
+ LOGE("input file size is %zu buf_size %zu",
+ file_size, p_obj->input[idx].size);
+
+ if (p_obj->input[idx].size > file_size) {
+ LOGE("error");
+ fclose(fp);
+ return -1;
+ }
+ fread(p_obj->input[idx].addr, 1, p_obj->input[idx].size, fp);
+ fclose(fp);
+ return 0;
+}
+
+/** mm_jpeg_test_read_qtable:
+ *
+ * Arguments:
+ * @filename: Qtable filename
+ * @chroma_flag: Flag indicating chroma qtable
+ *
+ * Return:
+ * 0 success, failure otherwise
+ *
+ * Description:
+ * Reads qtable from file and sets it in appropriate qtable
+ * based on flag.
+ **/
+int mm_jpeg_test_read_qtable(const char *filename, bool chroma_flag)
+{
+ FILE *fp = NULL;
+ int i;
+
+ if (filename == NULL)
+ return 0;
+
+ fp = fopen(filename, "r");
+ if (!fp) {
+ LOGE("error cannot open file");
+ return -1;
+ }
+
+ if (chroma_flag) {
+ for (i = 0; i < QUANT_SIZE; i++)
+ fscanf(fp, "%hhu,", &DEFAULT_QTABLE_1[i]);
+ } else {
+ for (i = 0; i < QUANT_SIZE; i++)
+ fscanf(fp, "%hhu,", &DEFAULT_QTABLE_0[i]);
+ }
+
+ fclose(fp);
+ return 0;
+}
+
+static int encode_init(jpeg_test_input_t *p_input, mm_jpeg_intf_test_t *p_obj,
+ int client_id)
+{
+ int rc = -1;
+ size_t size = (size_t)(p_input->width * p_input->height);
+ mm_jpeg_encode_params_t *p_params = &p_obj->params;
+ mm_jpeg_encode_job_t *p_job_params = &p_obj->job.encode_job;
+ uint32_t i = 0;
+ uint32_t burst_mode = p_input->burst_mode;
+ jpeg_test_input_t *p_in = p_input;
+
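+  /* one iteration per input descriptor; the list built by
+   * mm_jpeg_test_get_input() is terminated by an entry with a NULL filename */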
+ do {
+ p_obj->filename[i] = p_in->filename;
+ p_obj->width = p_input->width;
+ p_obj->height = p_input->height;
+ p_obj->out_filename[i] = p_in->out_filename;
+ p_obj->use_ion = 1;
+ p_obj->min_out_bufs = p_input->min_out_bufs;
+
+ /* allocate buffers */
+ p_obj->input[i].size = size * (size_t)p_input->col_fmt.mult.numerator /
+ (size_t)p_input->col_fmt.mult.denominator;
+ rc = mm_jpeg_test_alloc(&p_obj->input[i], p_obj->use_ion);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+
+ rc = mm_jpeg_test_read(p_obj, i);
+ if (rc) {
+ LOGE("Error, unable to read input image");
+ return -1;
+ }
+
+    rc = mm_jpeg_test_read_qtable(p_input->qtable_luma_file, false);
+    if (rc) {
+      LOGE("Error, unable to read luma qtable");
+      return -1;
+    }
+
+    rc = mm_jpeg_test_read_qtable(p_input->qtable_chroma_file, true);
+    if (rc) {
+      LOGE("Error, unable to read chroma qtable");
+      return -1;
+    }
+
+ /* src buffer config*/
+ p_params->src_main_buf[i].buf_size = p_obj->input[i].size;
+ p_params->src_main_buf[i].buf_vaddr = p_obj->input[i].addr;
+ p_params->src_main_buf[i].fd = p_obj->input[i].p_pmem_fd;
+ p_params->src_main_buf[i].index = i;
+ p_params->src_main_buf[i].format = MM_JPEG_FMT_YUV;
+ p_params->src_main_buf[i].offset.mp[0].len = (uint32_t)size;
+ p_params->src_main_buf[i].offset.mp[0].stride = p_input->width;
+ p_params->src_main_buf[i].offset.mp[0].scanline = p_input->height;
+ p_params->src_main_buf[i].offset.mp[1].len = (uint32_t)(size >> 1);
+
+ /* src buffer config*/
+ p_params->src_thumb_buf[i].buf_size = p_obj->input[i].size;
+ p_params->src_thumb_buf[i].buf_vaddr = p_obj->input[i].addr;
+ p_params->src_thumb_buf[i].fd = p_obj->input[i].p_pmem_fd;
+ p_params->src_thumb_buf[i].index = i;
+ p_params->src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+ p_params->src_thumb_buf[i].offset.mp[0].len = (uint32_t)size;
+ p_params->src_thumb_buf[i].offset.mp[0].stride = p_input->width;
+ p_params->src_thumb_buf[i].offset.mp[0].scanline = p_input->height;
+ p_params->src_thumb_buf[i].offset.mp[1].len = (uint32_t)(size >> 1);
+
+
+ i++;
+ } while((++p_in)->filename);
+
+ p_obj->num_bufs = i;
+
+ pthread_mutex_init(&p_obj->lock, NULL);
+ pthread_cond_init(&p_obj->cond, NULL);
+
+
+ /* set encode parameters */
+ p_params->jpeg_cb = mm_jpeg_encode_callback;
+ p_params->userdata = p_obj;
+ p_params->color_format = p_input->col_fmt.fmt;
+ p_params->thumb_color_format = p_input->col_fmt.fmt;
+
+ if (p_obj->min_out_bufs) {
+ p_params->num_dst_bufs = 2;
+ } else {
+ p_params->num_dst_bufs = p_obj->num_bufs;
+ }
+
+ for (i = 0; i < (uint32_t)p_params->num_dst_bufs; i++) {
+ p_obj->output[i].size = size * 3/2;
+ rc = mm_jpeg_test_alloc(&p_obj->output[i], 0);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+ /* dest buffer config */
+ p_params->dest_buf[i].buf_size = p_obj->output[i].size;
+ p_params->dest_buf[i].buf_vaddr = p_obj->output[i].addr;
+ p_params->dest_buf[i].fd = p_obj->output[i].p_pmem_fd;
+ p_params->dest_buf[i].index = i;
+ }
+
+
+ p_params->num_src_bufs = p_obj->num_bufs;
+ p_params->num_tmb_bufs = 0;
+ g_count = p_params->num_src_bufs;
+
+ p_params->encode_thumbnail = p_input->encode_thumbnail;
+ if (p_params->encode_thumbnail) {
+ p_params->num_tmb_bufs = p_obj->num_bufs;
+ }
+ p_params->quality = (uint32_t)p_input->main_quality;
+ p_params->thumb_quality = (uint32_t)p_input->thumb_quality;
+
+ p_job_params->dst_index = 0;
+ p_job_params->src_index = 0;
+ p_job_params->rotation = 0;
+
+ /* main dimension */
+ p_job_params->main_dim.src_dim.width = p_obj->width;
+ p_job_params->main_dim.src_dim.height = p_obj->height;
+ p_job_params->main_dim.dst_dim.width = p_obj->width;
+ p_job_params->main_dim.dst_dim.height = p_obj->height;
+ p_job_params->main_dim.crop.top = 0;
+ p_job_params->main_dim.crop.left = 0;
+ p_job_params->main_dim.crop.width = p_obj->width;
+ p_job_params->main_dim.crop.height = p_obj->height;
+
+ p_params->main_dim = p_job_params->main_dim;
+
+ /* thumb dimension */
+ p_job_params->thumb_dim.src_dim.width = p_obj->width;
+ p_job_params->thumb_dim.src_dim.height = p_obj->height;
+ p_job_params->thumb_dim.dst_dim.width = p_input->tmb_width;
+ p_job_params->thumb_dim.dst_dim.height = p_input->tmb_height;
+ p_job_params->thumb_dim.crop.top = 0;
+ p_job_params->thumb_dim.crop.left = 0;
+ p_job_params->thumb_dim.crop.width = 0;
+ p_job_params->thumb_dim.crop.height = 0;
+
+ p_params->thumb_dim = p_job_params->thumb_dim;
+
+ p_job_params->exif_info.numOfEntries = 0;
+ p_params->burst_mode = burst_mode;
+
+ /* Qtable */
+ p_job_params->qtable[0].eQuantizationTable =
+ OMX_IMAGE_QuantizationTableLuma;
+ p_job_params->qtable[1].eQuantizationTable =
+ OMX_IMAGE_QuantizationTableChroma;
+ p_job_params->qtable_set[0] = 1;
+ p_job_params->qtable_set[1] = 1;
+
+ for (i = 0; i < QUANT_SIZE; i++) {
+ p_job_params->qtable[0].nQuantizationMatrix[i] = DEFAULT_QTABLE_0[i];
+ p_job_params->qtable[1].nQuantizationMatrix[i] = DEFAULT_QTABLE_1[i];
+ }
+
+ p_obj->pic_size.w = (uint32_t)p_input->width;
+ p_obj->pic_size.h = (uint32_t)p_input->height;
+
+  p_obj->client_id = client_id;
+
+ return 0;
+}
+
+static void *encode_test(void *data)
+{
+ int rc = 0;
+ mm_jpeg_intf_test_t *jpeg_obj = (mm_jpeg_intf_test_t *)data;
+ char file_name[64];
+
+ uint32_t i = 0;
+ jpeg_obj->handle = jpeg_open(&jpeg_obj->ops, NULL, jpeg_obj->pic_size, NULL);
+ if (jpeg_obj->handle == 0) {
+ LOGE("Error");
+ jpeg_obj->ret = -1;
+ goto end;
+ }
+
+ rc = jpeg_obj->ops.create_session(jpeg_obj->handle, &jpeg_obj->params,
+ &jpeg_obj->job.encode_job.session_id);
+ if (jpeg_obj->job.encode_job.session_id == 0) {
+ LOGE("Error");
+ jpeg_obj->ret = -1;
+ goto end;
+ }
+
+ for (i = 0; i < jpeg_obj->num_bufs; i++) {
+ jpeg_obj->job.job_type = JPEG_JOB_TYPE_ENCODE;
+ jpeg_obj->job.encode_job.src_index = (int32_t) i;
+ jpeg_obj->job.encode_job.dst_index = (int32_t) i;
+ jpeg_obj->job.encode_job.thumb_index = (uint32_t) i;
+
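+    /* in burst mode with minimum output buffers the job is not bound to a
+     * fixed destination buffer index */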
+ if (jpeg_obj->params.burst_mode && jpeg_obj->min_out_bufs) {
+ jpeg_obj->job.encode_job.dst_index = -1;
+ }
+
+ rc = jpeg_obj->ops.start_job(&jpeg_obj->job, &jpeg_obj->job_id[i]);
+ if (rc) {
+ LOGE("Error");
+ jpeg_obj->ret = rc;
+ goto end;
+ }
+ }
+ jpeg_obj->job_id[i] = 0;
+
+ /*
+ usleep(5);
+ jpeg_obj->ops.abort_job(jpeg_obj->job_id[0]);
+ */
+ pthread_mutex_lock(&jpeg_obj->lock);
+ pthread_cond_wait(&jpeg_obj->cond, &jpeg_obj->lock);
+ pthread_mutex_unlock(&jpeg_obj->lock);
+
+ jpeg_obj->ops.destroy_session(jpeg_obj->job.encode_job.session_id);
+ jpeg_obj->ops.close(jpeg_obj->handle);
+
+end:
+ for (i = 0; i < jpeg_obj->num_bufs; i++) {
+ if (!jpeg_obj->min_out_bufs) {
+ // Save output files
+ LOGE("Saving file%s addr %p len %zu",
+ jpeg_obj->out_filename[i],
+ jpeg_obj->output[i].addr, jpeg_obj->buf_filled_len[i]);
+
+ snprintf(file_name, sizeof(file_name), "%s_%d.jpg",
+          jpeg_obj->out_filename[i], jpeg_obj->client_id);
+      fprintf(stderr, "Output file for client %d = %s\n",
+          jpeg_obj->client_id, file_name);
+
+ DUMP_TO_FILE(file_name, jpeg_obj->output[i].addr,
+ jpeg_obj->buf_filled_len[i]);
+ }
+ mm_jpeg_test_free(&jpeg_obj->input[i]);
+ mm_jpeg_test_free(&jpeg_obj->output[i]);
+ }
+ return NULL;
+}
+
+#define MAX_FILE_CNT (20)
+static int mm_jpeg_test_get_input(int argc, char *argv[],
+ jpeg_test_input_t *p_test)
+{
+ int c;
+ size_t in_file_cnt = 0, out_file_cnt = 0, i;
+ int idx = 0;
+ jpeg_test_input_t *p_test_base = p_test;
+
+ char *in_files[MAX_FILE_CNT];
+ char *out_files[MAX_FILE_CNT];
+
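+  /* -I and -O accept space separated file lists; their cases below collect
+   * arguments until the next option flag and rewind optind accordingly */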
+  while ((c = getopt(argc, argv, "-I:O:W:H:F:BTMx:y:Q:J:K:C:q:")) != -1) {
+ switch (c) {
+ case 'B':
+ fprintf(stderr, "%-25s\n", "Using burst mode");
+ p_test->burst_mode = 1;
+ break;
+ case 'I':
+ for (idx = optind - 1; idx < argc; idx++) {
+ if (argv[idx][0] == '-') {
+ break;
+ }
+ in_files[in_file_cnt++] = argv[idx];
+ }
+ optind = idx -1;
+
+ break;
+ case 'O':
+ for (idx = optind - 1; idx < argc; idx++) {
+ if (argv[idx][0] == '-') {
+ break;
+ }
+ out_files[out_file_cnt++] = argv[idx];
+ }
+ optind = idx -1;
+
+ break;
+ case 'W':
+ p_test->width = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Width: ", p_test->width);
+ break;
+ case 'H':
+ p_test->height = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Height: ", p_test->height);
+ break;
+ case 'F':
+ p_test->col_fmt = color_formats[atoi(optarg)];
+ fprintf(stderr, "%-25s%s\n", "Format: ", p_test->col_fmt.str);
+ break;
+ case 'M':
+ p_test->min_out_bufs = 1;
+ fprintf(stderr, "%-25s\n", "Using minimum number of output buffers");
+ break;
+ case 'T':
+ p_test->encode_thumbnail = 1;
+ fprintf(stderr, "%-25s\n", "Encode thumbnail");
+ break;
+ case 'x':
+ p_test->tmb_width = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Tmb Width: ", p_test->tmb_width);
+ break;
+ case 'y':
+ p_test->tmb_height = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Tmb Height: ", p_test->tmb_height);
+ break;
+ case 'Q':
+ p_test->main_quality = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Main quality: ", p_test->main_quality);
+ break;
+ case 'q':
+ p_test->thumb_quality = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Thumb quality: ", p_test->thumb_quality);
+ break;
+ case 'J':
+ p_test->qtable_luma_file = optarg;
+ fprintf(stderr, "%-25s%s\n", "Qtable luma path",
+ p_test->qtable_luma_file);
+ break;
+ case 'K':
+ p_test->qtable_chroma_file = optarg;
+ fprintf(stderr, "%-25s%s\n", "Qtable chroma path",
+ p_test->qtable_chroma_file);
+ break;
+    case 'C':
+      p_test->client_cnt = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Number of clients ",
+        p_test->client_cnt);
+      break;
+    default:;
+ }
+ }
+ fprintf(stderr, "Infiles: %zu Outfiles: %zu\n", in_file_cnt, out_file_cnt);
+
+ if (p_test->client_cnt > MAX_NUM_CLIENT) {
+ fprintf(stderr, "Clients requested exceeds max limit %d\n",
+ MAX_NUM_CLIENT);
+ return 1;
+ }
+ if (in_file_cnt > out_file_cnt) {
+ fprintf(stderr, "%-25s\n", "Insufficient number of output files!");
+ return 1;
+ }
+
+ // Discard the extra out files
+ out_file_cnt = in_file_cnt;
+
+ p_test = realloc(p_test, (in_file_cnt + 1) * sizeof(*p_test));
+ if (!p_test) {
+ LOGE("Error");
+ return 1;
+ }
+ memset(p_test+1, 0, (in_file_cnt) * sizeof(*p_test));
+
+ for (i = 0; i < in_file_cnt; i++, p_test++) {
+ memcpy(p_test, p_test_base, sizeof(*p_test));
+ p_test->filename = in_files[i];
+ p_test->out_filename = out_files[i];
+ fprintf(stderr, "Inf: %s Outf: %s\n", in_files[i], out_files[i]);
+ }
+
+ return 0;
+}
+
+static void mm_jpeg_test_print_usage()
+{
+ fprintf(stderr, "Usage: program_name [options]\n");
+ fprintf(stderr, "Mandatory options:\n");
+ fprintf(stderr, " -I FILE1 [FILE2] [FILEN]\tList of input files\n");
+ fprintf(stderr, " -O FILE1 [FILE2] [FILEN]\tList of output files\n");
+ fprintf(stderr, " -W WIDTH\t\tOutput image width\n");
+ fprintf(stderr, " -H HEIGHT\t\tOutput image height\n");
+ fprintf(stderr, " -F \t\tColor format: \n");
+ fprintf(stderr, "\t\t\t\t%s (0), %s (1), %s (2) %s (3)\n"
+ "\t\t\t\t%s (4), %s (5), %s (6) %s (7)\n ",
+ color_formats[0].str, color_formats[1].str,
+ color_formats[2].str, color_formats[3].str,
+ color_formats[4].str, color_formats[5].str,
+ color_formats[6].str, color_formats[7].str);
+ fprintf(stderr, "Optional:\n");
+ fprintf(stderr, " -T \t\Encode thumbnail\n");
+ fprintf(stderr, " -x TMB_WIDTH\t\tThumbnail width\n");
+ fprintf(stderr, " -y TMB_HEIGHT\t\tThumbnail height\n");
+ fprintf(stderr, " -Q MAIN_QUALITY\t\tMain image quality\n");
+ fprintf(stderr, " -q TMB_QUALITY\t\tThumbnail image quality\n");
+ fprintf(stderr, " -B \t\tBurst mode. Utilize both encoder engines on"
+ "supported targets\n");
+ fprintf(stderr, " -M \t\tUse minimum number of output buffers \n");
+ fprintf(stderr, " -J \t\tLuma QTable filename. Comma separated 8x8"
+ " matrix\n");
+ fprintf(stderr, " -K \t\tChroma QTable filename. Comma separated"
+ " 8x8 matrix\n");
+ fprintf(stderr, " -C \t\tNumber of clients to run in parllel\n");
+ fprintf(stderr, "\n");
+}
+
+/** main:
+ *
+ * Arguments:
+ * @argc
+ * @argv
+ *
+ * Return:
+ *      0 on success, negative value otherwise
+ *
+ * Description:
+ * main function
+ *
+ **/
+int main(int argc, char* argv[])
+{
+ jpeg_test_input_t *p_test_input;
+ mm_jpeg_intf_test_t client[MAX_NUM_CLIENT];
+ int ret = 0;
+ int i = 0;
+ int thread_cnt = 0;
+
+ if (argc > 1) {
+ p_test_input = calloc(2, sizeof(*p_test_input));
+    if (!p_test_input) {
+      LOGE("Error");
+      ret = -1;
+      goto exit;
+    }
+ memcpy(p_test_input, &jpeg_input[0], sizeof(*p_test_input));
+ ret = mm_jpeg_test_get_input(argc, argv, p_test_input);
+ if (ret) {
+ LOGE("Error");
+ goto exit;
+ }
+ } else {
+ mm_jpeg_test_print_usage();
+ return 1;
+ }
+
+ for (i = 0; i < p_test_input->client_cnt; i++) {
+ memset(&client[i], 0x0, sizeof(mm_jpeg_intf_test_t));
+ ret = encode_init(p_test_input, &client[i], i);
+ if (ret) {
+ LOGE("Error");
+ return -1;
+ }
+
+ ret = pthread_create(&client[i].thread_id, NULL, encode_test,
+ &client[i]);
+ if (ret != 0) {
+ fprintf(stderr, "Error in thread creation\n");
+ break;
+ }
+ }
+
+ thread_cnt = i;
+ for (i = 0; i < thread_cnt; i++) {
+ pthread_join(client[i].thread_id, NULL);
+ }
+
+exit:
+ for (i = 0; i < thread_cnt; i++) {
+ if (!client[i].ret) {
+ fprintf(stderr, "%-25s %d %s\n", "Client", i, "Success!");
+ } else {
+ fprintf(stderr, "%-25s %d %s\n", "Client", i, "Fail!");
+ }
+ }
+
+ if (argc > 1) {
+ if (p_test_input) {
+ free(p_test_input);
+ p_test_input = NULL;
+ }
+ }
+
+ return ret;
+}
+
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c
new file mode 100644
index 0000000..beb62f5
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c
@@ -0,0 +1,479 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <stdlib.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+
+#define MIN(a,b) (((a) < (b)) ? (a) : (b))
+#define MAX(a,b) (((a) > (b)) ? (a) : (b))
+#define CLAMP(x, min, max) MIN(MAX((x), (min)), (max))
+
+#define TIME_IN_US(r) ((uint64_t)r.tv_sec * 1000000LL + (uint64_t)r.tv_usec)
+struct timeval dtime[2];
+
+
+/** DUMP_TO_FILE:
+ * @filename: file name
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+ size_t rc = 0; \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr, 1, len, fp); \
+ fclose(fp); \
+ } else { \
+ LOGE("cannot dump image"); \
+ } \
+})
+
+static int g_count = 1, g_i;
+
+typedef struct {
+ char *filename;
+ int width;
+ int height;
+ char *out_filename;
+ int format;
+} jpeg_test_input_t;
+
+typedef struct {
+ char *filename;
+ int width;
+ int height;
+ char *out_filename;
+ pthread_mutex_t lock;
+ pthread_cond_t cond;
+ buffer_t input;
+ buffer_t output;
+ int use_ion;
+ uint32_t handle;
+ mm_jpegdec_ops_t ops;
+ uint32_t job_id[5];
+ mm_jpeg_decode_params_t params;
+ mm_jpeg_job_t job;
+ uint32_t session_id;
+} mm_jpegdec_intf_test_t;
+
+typedef struct {
+ char *format_str;
+ int eColorFormat;
+} mm_jpegdec_col_fmt_t;
+
+#define ARR_SZ(a) (sizeof(a)/sizeof(a[0]))
+
+static const mm_jpegdec_col_fmt_t col_formats[] =
+{
+ { "YCRCBLP_H2V2", (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2 },
+ { "YCBCRLP_H2V2", (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2 },
+ { "YCRCBLP_H2V1", (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1 },
+ { "YCBCRLP_H2V1", (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1 },
+ { "YCRCBLP_H1V2", (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2 },
+ { "YCBCRLP_H1V2", (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2 },
+ { "YCRCBLP_H1V1", (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1 },
+ { "YCBCRLP_H1V1", (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1 }
+};
+
+static void mm_jpegdec_decode_callback(jpeg_job_status_t status,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ mm_jpeg_output_t *p_output,
+ void *userData)
+{
+ mm_jpegdec_intf_test_t *p_obj = (mm_jpegdec_intf_test_t *)userData;
+
+ if (status == JPEG_JOB_STATUS_ERROR) {
+ LOGE("Decode error");
+ } else {
+ gettimeofday(&dtime[1], NULL);
+ LOGE("Decode time %llu ms",
+ ((TIME_IN_US(dtime[1]) - TIME_IN_US(dtime[0]))/1000));
+
+ LOGE("Decode success file%s addr %p len %zu",
+ p_obj->out_filename,
+ p_output->buf_vaddr, p_output->buf_filled_len);
+ DUMP_TO_FILE(p_obj->out_filename, p_output->buf_vaddr, p_output->buf_filled_len);
+ }
+ g_i++;
+ if (g_i >= g_count) {
+ LOGE("Signal the thread");
+ pthread_cond_signal(&p_obj->cond);
+ }
+}
+
+int mm_jpegdec_test_alloc(buffer_t *p_buffer, int use_pmem)
+{
+ int ret = 0;
+ /*Allocate buffers*/
+ if (use_pmem) {
+ p_buffer->addr = (uint8_t *)buffer_allocate(p_buffer, 0);
+ if (NULL == p_buffer->addr) {
+ LOGE("Error");
+ return -1;
+ }
+ } else {
+ /* Allocate heap memory */
+ p_buffer->addr = (uint8_t *)malloc(p_buffer->size);
+ if (NULL == p_buffer->addr) {
+ LOGE("Error");
+ return -1;
+ }
+ }
+ return ret;
+}
+
+void mm_jpegdec_test_free(buffer_t *p_buffer)
+{
+ if (p_buffer->addr == NULL)
+ return;
+
+ if (p_buffer->p_pmem_fd >= 0)
+ buffer_deallocate(p_buffer);
+ else
+ free(p_buffer->addr);
+
+ memset(p_buffer, 0x0, sizeof(buffer_t));
+}
+
+int mm_jpegdec_test_read(mm_jpegdec_intf_test_t *p_obj)
+{
+ int rc = 0;
+ FILE *fp = NULL;
+ size_t file_size = 0;
+ fp = fopen(p_obj->filename, "rb");
+ if (!fp) {
+ LOGE("error");
+ return -1;
+ }
+ fseek(fp, 0, SEEK_END);
+ file_size = (size_t)ftell(fp);
+ fseek(fp, 0, SEEK_SET);
+
+ LOGE("input file size is %zu",
+ file_size);
+
+ p_obj->input.size = file_size;
+
+ /* allocate buffers */
+ rc = mm_jpegdec_test_alloc(&p_obj->input, p_obj->use_ion);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ fread(p_obj->input.addr, 1, p_obj->input.size, fp);
+ fclose(fp);
+ return 0;
+}
+
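+/* Maps a color format to the ratio of total YUV frame size to luma plane
+ * size (1.5 for H2V2, 2.0 for H2V1/H1V2, 3.0 for H1V1, 1.0 for monochrome);
+ * used to size the decoded output buffer. */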
+void chromaScale(mm_jpeg_color_format format, double *cScale)
+{
+ double scale;
+
+ switch(format) {
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+ scale = 1.5;
+ break;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+ scale = 2.0;
+ break;
+ case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+ case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+ scale = 3.0;
+ break;
+ case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+ scale = 1.0;
+ break;
+ default:
+ scale = 0;
+ LOGE("color format Error");
+ }
+
+ *cScale = scale;
+}
+
+static int decode_init(jpeg_test_input_t *p_input, mm_jpegdec_intf_test_t *p_obj)
+{
+ int rc = -1;
+ size_t size = (size_t)(CEILING16(p_input->width) * CEILING16(p_input->height));
+ double cScale;
+ mm_jpeg_decode_params_t *p_params = &p_obj->params;
+ mm_jpeg_decode_job_t *p_job_params = &p_obj->job.decode_job;
+
+ p_obj->filename = p_input->filename;
+ p_obj->width = p_input->width;
+ p_obj->height = p_input->height;
+ p_obj->out_filename = p_input->out_filename;
+ p_obj->use_ion = 1;
+
+ pthread_mutex_init(&p_obj->lock, NULL);
+ pthread_cond_init(&p_obj->cond, NULL);
+
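+  /* size the output buffer for the full decoded frame: a 16-aligned luma
+   * plane plus chroma planes scaled by the subsampling factor */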
+ chromaScale(p_input->format, &cScale);
+ p_obj->output.size = (size_t)((double)size * cScale);
+ rc = mm_jpegdec_test_alloc(&p_obj->output, p_obj->use_ion);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ rc = mm_jpegdec_test_read(p_obj);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ /* set encode parameters */
+ p_params->jpeg_cb = mm_jpegdec_decode_callback;
+ p_params->userdata = p_obj;
+ p_params->color_format = p_input->format;
+
+ /* dest buffer config */
+ p_params->dest_buf[0].buf_size = p_obj->output.size;
+ p_params->dest_buf[0].buf_vaddr = p_obj->output.addr;
+ p_params->dest_buf[0].fd = p_obj->output.p_pmem_fd;
+ p_params->dest_buf[0].format = MM_JPEG_FMT_YUV;
+ p_params->dest_buf[0].offset.mp[0].len = (uint32_t)size;
+ p_params->dest_buf[0].offset.mp[1].len =
+ (uint32_t)((double)size * (cScale - 1.0));
+ p_params->dest_buf[0].offset.mp[0].stride = CEILING16(p_input->width);
+ p_params->dest_buf[0].offset.mp[0].scanline = CEILING16(p_input->height);
+ p_params->dest_buf[0].offset.mp[1].stride = CEILING16(p_input->width);
+ p_params->dest_buf[0].offset.mp[1].scanline = CEILING16(p_input->height);
+ p_params->dest_buf[0].index = 0;
+ p_params->num_dst_bufs = 1;
+
+ /* src buffer config*/
+ p_params->src_main_buf[0].buf_size = p_obj->input.size;
+ p_params->src_main_buf[0].buf_vaddr = p_obj->input.addr;
+ p_params->src_main_buf[0].fd = p_obj->input.p_pmem_fd;
+ p_params->src_main_buf[0].index = 0;
+ p_params->src_main_buf[0].format = MM_JPEG_FMT_BITSTREAM;
+ /*
+ p_params->src_main_buf[0].offset.mp[0].len = size;
+ p_params->src_main_buf[0].offset.mp[1].len = size >> 1;
+ */
+ p_params->num_src_bufs = 1;
+
+ p_job_params->dst_index = 0;
+ p_job_params->src_index = 0;
+ p_job_params->rotation = 0;
+
+ /* main dimension */
+ p_job_params->main_dim.src_dim.width = p_obj->width;
+ p_job_params->main_dim.src_dim.height = p_obj->height;
+ p_job_params->main_dim.dst_dim.width = p_obj->width;
+ p_job_params->main_dim.dst_dim.height = p_obj->height;
+ p_job_params->main_dim.crop.top = 0;
+ p_job_params->main_dim.crop.left = 0;
+ p_job_params->main_dim.crop.width = p_obj->width;
+ p_job_params->main_dim.crop.height = p_obj->height;
+
+
+ return 0;
+}
+
+void omx_test_dec_print_usage()
+{
+ fprintf(stderr, "Usage: program_name [options]\n");
+ fprintf(stderr, "Mandatory options:\n");
+ fprintf(stderr, " -I FILE\t\tPath to the input file.\n");
+ fprintf(stderr, " -O FILE\t\tPath for the output file.\n");
+ fprintf(stderr, " -W WIDTH\t\tOutput image width\n");
+ fprintf(stderr, " -H HEIGHT\t\tOutput image height\n");
+ fprintf(stderr, "Optional:\n");
+ fprintf(stderr, " -F FORMAT\t\tDefault image format:\n");
+ fprintf(stderr, "\t\t\t\t%s (0), %s (1), %s (2) %s (3)\n"
+ "%s (4), %s (5), %s (6) %s (7)\n",
+ col_formats[0].format_str, col_formats[1].format_str,
+ col_formats[2].format_str, col_formats[3].format_str,
+ col_formats[4].format_str, col_formats[5].format_str,
+ col_formats[6].format_str, col_formats[7].format_str
+ );
+
+ fprintf(stderr, "\n");
+}
+
+static int mm_jpegdec_test_get_input(int argc, char *argv[],
+ jpeg_test_input_t *p_test)
+{
+ int c;
+
+ while ((c = getopt(argc, argv, "I:O:W:H:F:")) != -1) {
+ switch (c) {
+ case 'O':
+ p_test->out_filename = optarg;
+ fprintf(stderr, "%-25s%s\n", "Output image path",
+ p_test->out_filename);
+ break;
+ case 'I':
+ p_test->filename = optarg;
+ fprintf(stderr, "%-25s%s\n", "Input image path", p_test->filename);
+ break;
+ case 'W':
+ p_test->width = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Default width", p_test->width);
+ break;
+ case 'H':
+ p_test->height = atoi(optarg);
+ fprintf(stderr, "%-25s%d\n", "Default height", p_test->height);
+ break;
+ case 'F': {
+ int format = 0;
+ format = atoi(optarg);
+ int num_formats = ARR_SZ(col_formats);
+        format = CLAMP(format, 0, num_formats - 1);
+ p_test->format = col_formats[format].eColorFormat;
+ fprintf(stderr, "%-25s%s\n", "Default image format",
+ col_formats[format].format_str);
+ break;
+ }
+ default:;
+ }
+ }
+  if (!p_test->filename || !p_test->out_filename || !p_test->width ||
+ !p_test->height) {
+ fprintf(stderr, "Missing required arguments.\n");
+ omx_test_dec_print_usage();
+ return -1;
+ }
+ return 0;
+}
+
+static int decode_test(jpeg_test_input_t *p_input)
+{
+ int rc = 0;
+ mm_jpegdec_intf_test_t jpeg_obj;
+ int i = 0;
+
+ memset(&jpeg_obj, 0x0, sizeof(jpeg_obj));
+ rc = decode_init(p_input, &jpeg_obj);
+ if (rc) {
+ LOGE("Error");
+ return -1;
+ }
+
+ jpeg_obj.handle = jpegdec_open(&jpeg_obj.ops);
+ if (jpeg_obj.handle == 0) {
+ LOGE("Error");
+ goto end;
+ }
+
+ rc = jpeg_obj.ops.create_session(jpeg_obj.handle, &jpeg_obj.params,
+ &jpeg_obj.job.decode_job.session_id);
+ if (jpeg_obj.job.decode_job.session_id == 0) {
+ LOGE("Error");
+ goto end;
+ }
+
+ for (i = 0; i < g_count; i++) {
+ jpeg_obj.job.job_type = JPEG_JOB_TYPE_DECODE;
+
+ LOGE("Starting decode job");
+ gettimeofday(&dtime[0], NULL);
+
+ fprintf(stderr, "Starting decode of %s into %s outw %d outh %d\n\n",
+ p_input->filename, p_input->out_filename,
+ p_input->width, p_input->height);
+ rc = jpeg_obj.ops.start_job(&jpeg_obj.job, &jpeg_obj.job_id[i]);
+ if (rc) {
+ LOGE("Error");
+ goto end;
+ }
+ }
+
+ /*
+ usleep(5);
+ jpeg_obj.ops.abort_job(jpeg_obj.job_id[0]);
+ */
+ pthread_mutex_lock(&jpeg_obj.lock);
+ pthread_cond_wait(&jpeg_obj.cond, &jpeg_obj.lock);
+ pthread_mutex_unlock(&jpeg_obj.lock);
+
+ fprintf(stderr, "Decode time %llu ms\n",
+ ((TIME_IN_US(dtime[1]) - TIME_IN_US(dtime[0]))/1000));
+
+
+ jpeg_obj.ops.destroy_session(jpeg_obj.job.decode_job.session_id);
+
+ jpeg_obj.ops.close(jpeg_obj.handle);
+
+
+end:
+ mm_jpegdec_test_free(&jpeg_obj.input);
+ mm_jpegdec_test_free(&jpeg_obj.output);
+ return 0;
+}
+
+/** main:
+ *
+ * Arguments:
+ * @argc
+ * @argv
+ *
+ * Return:
+ *    0 on success, negative value on failure
+ *
+ * Description:
+ * main function
+ *
+ **/
+int main(int argc, char* argv[])
+{
+ jpeg_test_input_t dec_test_input;
+ int ret;
+
+ memset(&dec_test_input, 0, sizeof(dec_test_input));
+ ret = mm_jpegdec_test_get_input(argc, argv, &dec_test_input);
+
+ if (ret) {
+ return -1;
+ }
+
+ return decode_test(&dec_test_input);
+}
+
+
diff --git a/camera/QCamera2/stack/mm-lib2d-interface/Android.mk b/camera/QCamera2/stack/mm-lib2d-interface/Android.mk
new file mode 100644
index 0000000..696f04a
--- /dev/null
+++ b/camera/QCamera2/stack/mm-lib2d-interface/Android.mk
@@ -0,0 +1,39 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_CFLAGS+= -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+IMGLIB_HEADER_PATH := $(TARGET_OUT_INTERMEDIATES)/include/mm-camera/imglib
+
+LOCAL_C_INCLUDES += \
+ $(IMGLIB_HEADER_PATH) \
+ $(LOCAL_PATH)/inc \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../mm-camera-interface/inc \
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+ LOCAL_CFLAGS += -DUSE_ION
+endif
+
+
+LOCAL_SRC_FILES := \
+ src/mm_lib2d.c
+
+LOCAL_MODULE := libmmlib2d_interface
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libmmcamera_interface
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h b/camera/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h
new file mode 100644
index 0000000..d1e69b4
--- /dev/null
+++ b/camera/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h
@@ -0,0 +1,209 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_LIB2D_H_
+#define MM_LIB2D_H_
+
+#include "cam_types.h"
+#ifdef QCAMERA_REDEFINE_LOG
+#ifndef CAM_MODULE
+#define CAM_MODULE CAM_NO_MODULE
+#endif
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#endif
+
+/** lib2d_error
+ * @MM_LIB2D_SUCCESS: Success
+ * @MM_LIB2D_ERR_GENERAL: General Error
+ * @MM_LIB2D_ERR_MEMORY: Insufficient memory error
+ * @MM_LIB2D_ERR_BAD_PARAM: Bad params error
+**/
+typedef enum lib2d_error_t {
+ MM_LIB2D_SUCCESS,
+ MM_LIB2D_ERR_GENERAL,
+ MM_LIB2D_ERR_MEMORY,
+ MM_LIB2D_ERR_BAD_PARAM,
+} lib2d_error;
+
+/** lib2d_mode
+ * @MM_LIB2D_SYNC_MODE: Synchronous mode
+ * @MM_LIB2D_ASYNC_MODE: Asynchronous mode
+**/
+typedef enum mm_lib2d_mode_t {
+ MM_LIB2D_SYNC_MODE,
+ MM_LIB2D_ASYNC_MODE,
+} lib2d_mode;
+
+/** mm_lib2d_buffer_type
+ * @MM_LIB2D_BUFFER_TYPE_RGB: RGB Buffer type
+ * @MM_LIB2D_BUFFER_TYPE_YUV: YUV buffer type
+**/
+typedef enum mm_lib2d_buffer_type_t {
+ MM_LIB2D_BUFFER_TYPE_RGB,
+ MM_LIB2D_BUFFER_TYPE_YUV,
+} mm_lib2d_buffer_type;
+
+/** mm_lib2d_rgb_buffer
+ * @fd: handle to the buffer memory
+ * @format: RGB color format
+ * @width: defines width in pixels
+ * @height: defines height in pixels
+ * @buffer: pointer to the RGB buffer
+ * @phys: gpu mapped physical address
+ * @stride: defines stride in bytes
+**/
+typedef struct mm_lib2d_rgb_buffer_t {
+ int32_t fd;
+ cam_format_t format;
+ uint32_t width;
+ uint32_t height;
+ void *buffer;
+ void *phys;
+ int32_t stride;
+} mm_lib2d_rgb_buffer;
+
+/** mm_lib2d_yuv_buffer
+ * @fd: handle to the buffer memory
+ * @format: YUV color format
+ * @width: defines width in pixels
+ * @height: defines height in pixels
+ * @plane0: holds the whole buffer if YUV format is not planar
+ * @phys0: gpu mapped physical address
+ * @stride0: stride in bytes
+ * @plane1: holds UV or VU plane for planar interleaved
+ * @phys1: gpu mapped physical address
+ * @stride1: stride in bytes
+ * @plane2: holds the third plane, ignored if YUV format is not planar
+ * @phys2: gpu mapped physical address
+ * @stride2: stride in bytes
+**/
+typedef struct mm_lib2d_yuv_buffer_t {
+ int32_t fd;
+ cam_format_t format;
+ uint32_t width;
+ uint32_t height;
+ void *plane0;
+ void *phys0;
+ int32_t stride0;
+ void *plane1;
+ void *phys1;
+ int32_t stride1;
+ void *plane2;
+ void *phys2;
+ int32_t stride2;
+} mm_lib2d_yuv_buffer;
+
+/** mm_lib2d_buffer
+ * @buffer_type: Buffer type, whether RGB or YUV
+ * @rgb_buffer: RGB buffer handle
+ * @yuv_buffer: YUV buffer handle
+**/
+typedef struct mm_lib2d_buffer_t {
+ mm_lib2d_buffer_type buffer_type;
+ union {
+ mm_lib2d_rgb_buffer rgb_buffer;
+ mm_lib2d_yuv_buffer yuv_buffer;
+ };
+} mm_lib2d_buffer;
+
+/** lib2d_client_cb
+ * @userdata: App userdata
+ * @jobid: job id
+**/
+typedef lib2d_error (*lib2d_client_cb) (void *userdata, int jobid);
+
+/**
+ * Function: mm_lib2d_init
+ *
+ * Description: Initialization function for Lib2D. src_format, dst_format
+ * are hints to the underlying component to initialize.
+ *
+ * Input parameters:
+ * mode - Mode (sync/async) in which App wants lib2d to run.
+ * src_format - source surface format
+ * dst_format - Destination surface format
+ *   my_obj - handle that will be returned on successful Init. App has to
+ * call other lib2d functions by passing this handle.
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_MEMORY
+ * MM_LIB2D_ERR_BAD_PARAM
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_init(lib2d_mode mode, cam_format_t src_format,
+ cam_format_t dst_format, void **lib2d_obj_handle);
+
+/**
+ * Function: mm_lib2d_deinit
+ *
+ * Description: De-Initialization function for Lib2D
+ *
+ * Input parameters:
+ *   lib2d_obj_handle - handle to the lib2d object
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_deinit(void *lib2d_obj_handle);
+
+/**
+ * Function: mm_lib2d_start_job
+ *
+ * Description: Start executing the job
+ *
+ * Input parameters:
+ *   lib2d_obj_handle - handle to the lib2d object
+ * src_buffer - pointer to the source buffer
+ * dst_buffer - pointer to the destination buffer
+ * jobid - job id of this request
+ *   userdata - userdata that will be passed to the callback function
+ * cb - callback function that will be called on completion of this job
+ * rotation - rotation to be applied
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_MEMORY
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_start_job(void *lib2d_obj_handle,
+ mm_lib2d_buffer* src_buffer, mm_lib2d_buffer* dst_buffer,
+ int jobid, void *userdata, lib2d_client_cb cb, uint32_t rotation);
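+
+/*
+ * Usage sketch (illustrative only): a minimal NV21-to-ARGB client flow,
+ * assuming the same formats that are passed to mm_lib2d_init(). The fds,
+ * plane pointers, strides and the client_cb callback are placeholders the
+ * caller must supply from its own allocator; they are not part of this
+ * header. Job id 1 and rotation 0 are arbitrary example values.
+ *
+ *   void *handle = NULL;
+ *   mm_lib2d_buffer src = {0};
+ *   mm_lib2d_buffer dst = {0};
+ *
+ *   if (mm_lib2d_init(MM_LIB2D_SYNC_MODE, CAM_FORMAT_YUV_420_NV21,
+ *       CAM_FORMAT_8888_ARGB, &handle) != MM_LIB2D_SUCCESS)
+ *     return;
+ *
+ *   src.buffer_type        = MM_LIB2D_BUFFER_TYPE_YUV;
+ *   src.yuv_buffer.format  = CAM_FORMAT_YUV_420_NV21;
+ *   src.yuv_buffer.fd      = src_fd;
+ *   src.yuv_buffer.width   = 640;
+ *   src.yuv_buffer.height  = 480;
+ *   src.yuv_buffer.plane0  = y_ptr;
+ *   src.yuv_buffer.stride0 = 640;
+ *   src.yuv_buffer.plane1  = uv_ptr;
+ *   src.yuv_buffer.stride1 = 640;
+ *
+ *   dst.buffer_type       = MM_LIB2D_BUFFER_TYPE_RGB;
+ *   dst.rgb_buffer.format = CAM_FORMAT_8888_ARGB;
+ *   dst.rgb_buffer.fd     = dst_fd;
+ *   dst.rgb_buffer.width  = 640;
+ *   dst.rgb_buffer.height = 480;
+ *   dst.rgb_buffer.buffer = argb_ptr;
+ *   dst.rgb_buffer.stride = 640 * 4;
+ *
+ *   mm_lib2d_start_job(handle, &src, &dst, 1, NULL, client_cb, 0);
+ *   mm_lib2d_deinit(handle);
+ */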
+
+#endif /* MM_LIB2D_H_ */
+
+
diff --git a/camera/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c b/camera/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c
new file mode 100644
index 0000000..28ef27f
--- /dev/null
+++ b/camera/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c
@@ -0,0 +1,604 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <utils/Log.h>
+
+// System dependencies
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <pthread.h>
+#include <sys/ioctl.h>
+#include <sys/prctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+// Camera dependencies
+#include "img_common.h"
+#include "img_comp.h"
+#include "img_comp_factory.h"
+#include "img_buffer.h"
+#include "lib2d.h"
+#include "mm_lib2d.h"
+#include "img_meta.h"
+
+/** lib2d_job_private_info
+ * @jobid: Job id of this process request
+ * @userdata: Client userdata that will be passed on callback
+ * @lib2d_client_cb: Application's callback function pointer
+ * which will be called upon completion of current job.
+**/
+typedef struct lib2d_job_private_info_t {
+ int jobid;
+ void *userdata;
+ lib2d_error (*lib2d_client_cb) (void *userdata, int jobid);
+} lib2d_job_private_info;
+
+/** img_lib_t
+ * @ptr: handle to imglib library
+ * @img_core_get_comp: function pointer for img_core_get_comp
+ * @img_wait_for_completion: function pointer for img_wait_for_completion
+**/
+typedef struct {
+ void *ptr;
+ int (*img_core_get_comp) (img_comp_role_t role, char *name,
+ img_core_ops_t *p_ops);
+ int (*img_wait_for_completion) (pthread_cond_t *p_cond,
+ pthread_mutex_t *p_mutex, int32_t ms);
+} img_lib_t;
+
+/** mm_lib2d_obj
+ * @core_ops: image core ops structure handle
+ * @comp: component structure handle
+ * @comp_mode: underlying component mode
+ * @lib2d_mode: lib2d mode requested by client
+ * @img_lib: imglib library, function ptrs handle
+ * @mutex: lib2d mutex used for synchronization
+ *   @cond: lib2d cond used for synchronization
+**/
+typedef struct mm_lib2d_obj_t {
+ img_core_ops_t core_ops;
+ img_component_ops_t comp;
+ img_comp_mode_t comp_mode;
+ lib2d_mode lib2d_mode;
+ img_lib_t img_lib;
+ pthread_mutex_t mutex;
+ pthread_cond_t cond;
+} mm_lib2d_obj;
+
+
+/**
+ * Function: lib2d_event_handler
+ *
+ * Description: Event handler. All the component events
+ * are received here.
+ *
+ * Input parameters:
+ *   p_appdata - lib2d object
+ * p_event - pointer to the event
+ *
+ * Return values:
+ * IMG_SUCCESS
+ * IMG_ERR_INVALID_INPUT
+ *
+ * Notes: none
+ **/
+int lib2d_event_handler(void* p_appdata, img_event_t *p_event)
+{
+ mm_lib2d_obj *lib2d_obj = (mm_lib2d_obj *)p_appdata;
+
+ if ((NULL == p_event) || (NULL == p_appdata)) {
+ LOGE("invalid event");
+ return IMG_ERR_INVALID_INPUT;
+ }
+
+ LOGD("type %d", p_event->type);
+
+ switch (p_event->type) {
+ case QIMG_EVT_DONE:
+ pthread_cond_signal(&lib2d_obj->cond);
+ break;
+ default:;
+ }
+ return IMG_SUCCESS;
+}
+
+/**
+ * Function: lib2d_callback_handler
+ *
+ * Description: Callback handler. Registered with Component
+ * on IMG_COMP_INIT. Will be called when processing
+ *   of the current request is completed. If the component is running in
+ *   async mode, this is where the client learns that execution has
+ *   finished for the input and output frames.
+ *
+ * Input parameters:
+ *   userdata - lib2d object
+ * p_in_frame - pointer to input frame
+ * p_out_frame - pointer to output frame
+ * p_meta - pointer to meta data
+ *
+ * Return values:
+ * IMG_SUCCESS
+ * IMG_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+int lib2d_callback_handler(void *userdata, img_frame_t *p_in_frame,
+ img_frame_t *p_out_frame, img_meta_t *p_meta)
+{
+ lib2d_job_private_info *job_info = NULL;
+
+ if (NULL == userdata) {
+ LOGE("invalid event");
+ return IMG_ERR_INVALID_INPUT;
+ }
+
+ // assert(p_in_frame->private_data == p_out_frame->private_data);
+
+ job_info = (lib2d_job_private_info *)p_in_frame->private_data;
+ if (job_info->lib2d_client_cb != NULL) {
+ job_info->lib2d_client_cb(job_info->userdata, job_info->jobid);
+ }
+
+ free(p_in_frame->private_data);
+ free(p_in_frame);
+ free(p_out_frame);
+ free(p_meta);
+
+ return IMG_SUCCESS;
+}
+
+/**
+ * Function: lib2d_fill_img_frame
+ *
+ * Description: Setup img_frame_t for given buffer
+ *
+ * Input parameters:
+ * p_frame - pointer to img_frame_t that needs to be setup
+ * lib2d_buffer - pointer to input buffer
+ * jobid - job id
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_fill_img_frame(img_frame_t *p_frame,
+ mm_lib2d_buffer* lib2d_buffer, int jobid)
+{
+ // use job id for now
+ p_frame->frame_cnt = jobid;
+ p_frame->idx = jobid;
+ p_frame->frame_id = jobid;
+
+ if (lib2d_buffer->buffer_type == MM_LIB2D_BUFFER_TYPE_RGB) {
+ mm_lib2d_rgb_buffer *rgb_buffer = &lib2d_buffer->rgb_buffer;
+
+ p_frame->info.num_planes = 1;
+ p_frame->info.width = rgb_buffer->width;
+ p_frame->info.height = rgb_buffer->height;
+
+ p_frame->frame[0].plane_cnt = 1;
+ p_frame->frame[0].plane[0].plane_type = PLANE_ARGB;
+ p_frame->frame[0].plane[0].addr = rgb_buffer->buffer;
+ p_frame->frame[0].plane[0].stride = rgb_buffer->stride;
+ p_frame->frame[0].plane[0].length = (rgb_buffer->stride *
+ rgb_buffer->height);
+ p_frame->frame[0].plane[0].fd = rgb_buffer->fd;
+ p_frame->frame[0].plane[0].height = rgb_buffer->height;
+ p_frame->frame[0].plane[0].width = rgb_buffer->width;
+ p_frame->frame[0].plane[0].offset = 0;
+ p_frame->frame[0].plane[0].scanline = rgb_buffer->height;
+ } else if (lib2d_buffer->buffer_type == MM_LIB2D_BUFFER_TYPE_YUV) {
+ mm_lib2d_yuv_buffer *yuv_buffer = &lib2d_buffer->yuv_buffer;
+
+ p_frame->info.num_planes = 2;
+ p_frame->info.width = yuv_buffer->width;
+ p_frame->info.height = yuv_buffer->height;
+
+ p_frame->frame[0].plane_cnt = 2;
+ p_frame->frame[0].plane[0].plane_type = PLANE_Y;
+ p_frame->frame[0].plane[0].addr = yuv_buffer->plane0;
+ p_frame->frame[0].plane[0].stride = yuv_buffer->stride0;
+ p_frame->frame[0].plane[0].length = (yuv_buffer->stride0 *
+ yuv_buffer->height);
+ p_frame->frame[0].plane[0].fd = yuv_buffer->fd;
+ p_frame->frame[0].plane[0].height = yuv_buffer->height;
+ p_frame->frame[0].plane[0].width = yuv_buffer->width;
+ p_frame->frame[0].plane[0].offset = 0;
+ p_frame->frame[0].plane[0].scanline = yuv_buffer->height;
+
+ if (yuv_buffer->format == CAM_FORMAT_YUV_420_NV12) {
+ p_frame->frame[0].plane[1].plane_type = PLANE_CB_CR;
+ } else if(yuv_buffer->format == CAM_FORMAT_YUV_420_NV21) {
+ p_frame->frame[0].plane[1].plane_type = PLANE_CR_CB;
+ }
+ p_frame->frame[0].plane[1].addr = yuv_buffer->plane1;
+ p_frame->frame[0].plane[1].stride = yuv_buffer->stride1;
+ p_frame->frame[0].plane[1].length = (yuv_buffer->stride1 *
+ yuv_buffer->height / 2);
+ p_frame->frame[0].plane[1].fd = yuv_buffer->fd;
+ p_frame->frame[0].plane[1].height = yuv_buffer->height;
+ p_frame->frame[0].plane[1].width = yuv_buffer->width;
+ p_frame->frame[0].plane[1].offset = 0;
+ p_frame->frame[0].plane[1].scanline = yuv_buffer->height;
+ } else {
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: mm_lib2d_init
+ *
+ * Description: Initialization function for Lib2D. src_format, dst_format
+ * are hints to the underlying component to initialize.
+ *
+ * Input parameters:
+ * mode - Mode (sync/async) in which App wants lib2d to run.
+ * src_format - source surface format
+ * dst_format - Destination surface format
+ *   my_obj - handle that will be returned on successful Init. App has to
+ * call other lib2d functions by passing this handle.
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_MEMORY
+ * MM_LIB2D_ERR_BAD_PARAM
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+
+lib2d_error mm_lib2d_init(lib2d_mode mode, cam_format_t src_format,
+ cam_format_t dst_format, void **my_obj)
+{
+ int32_t rc = IMG_SUCCESS;
+ mm_lib2d_obj *lib2d_obj = NULL;
+ img_core_ops_t *p_core_ops = NULL;
+ img_component_ops_t *p_comp = NULL;
+
+ if (my_obj == NULL) {
+ return MM_LIB2D_ERR_BAD_PARAM;
+ }
+
+ // validate src_format, dst_format to check whether we support these.
+ // Currently support NV21 to ARGB conversions only. Others not tested.
+ if ((src_format != CAM_FORMAT_YUV_420_NV21) ||
+ (dst_format != CAM_FORMAT_8888_ARGB)) {
+ LOGE("Formats conversion from %d to %d not supported",
+ src_format, dst_format);
+ }
+
+ lib2d_obj = malloc(sizeof(mm_lib2d_obj));
+ if (lib2d_obj == NULL) {
+ return MM_LIB2D_ERR_MEMORY;
+ }
+
+ // Open libmmcamera_imglib
+ lib2d_obj->img_lib.ptr = dlopen("libmmcamera_imglib.so", RTLD_NOW);
+ if (!lib2d_obj->img_lib.ptr) {
+ LOGE("ERROR: couldn't dlopen libmmcamera_imglib.so: %s",
+ dlerror());
+ goto FREE_LIB2D_OBJ;
+ }
+
+  /* Get function pointers for the imglib functions we need */
+ *(void **)&lib2d_obj->img_lib.img_core_get_comp =
+ dlsym(lib2d_obj->img_lib.ptr, "img_core_get_comp");
+ *(void **)&lib2d_obj->img_lib.img_wait_for_completion =
+ dlsym(lib2d_obj->img_lib.ptr, "img_wait_for_completion");
+
+ /* Validate function pointers */
+ if ((lib2d_obj->img_lib.img_core_get_comp == NULL) ||
+ (lib2d_obj->img_lib.img_wait_for_completion == NULL)) {
+ LOGE(" ERROR mapping symbols from libc2d2.so");
+ goto FREE_LIB2D_OBJ;
+ }
+
+ p_core_ops = &lib2d_obj->core_ops;
+ p_comp = &lib2d_obj->comp;
+
+ pthread_mutex_init(&lib2d_obj->mutex, NULL);
+ pthread_cond_init(&lib2d_obj->cond, NULL);
+
+ rc = lib2d_obj->img_lib.img_core_get_comp(IMG_COMP_LIB2D,
+ "qti.lib2d", p_core_ops);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto FREE_LIB2D_OBJ;
+ }
+
+ rc = IMG_COMP_LOAD(p_core_ops, NULL);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto FREE_LIB2D_OBJ;
+ }
+
+ rc = IMG_COMP_CREATE(p_core_ops, p_comp);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_UNLOAD;
+ }
+
+ rc = IMG_COMP_INIT(p_comp, (void *)lib2d_obj, lib2d_callback_handler);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_UNLOAD;
+ }
+
+ rc = IMG_COMP_SET_CB(p_comp, lib2d_event_handler);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_DEINIT;
+ }
+
+ lib2d_obj->lib2d_mode = mode;
+ img_comp_mode_t comp_mode;
+ if (lib2d_obj->lib2d_mode == MM_LIB2D_SYNC_MODE) {
+ comp_mode = IMG_SYNC_MODE;
+ } else {
+ comp_mode = IMG_ASYNC_MODE;
+ }
+
+ // Set source format
+ rc = IMG_COMP_SET_PARAM(p_comp, QLIB2D_SOURCE_FORMAT, (void *)&src_format);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_DEINIT;
+ }
+
+ // Set destination format
+ rc = IMG_COMP_SET_PARAM(p_comp, QLIB2D_DESTINATION_FORMAT,
+ (void *)&dst_format);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_DEINIT;
+ }
+
+ // Try setting the required mode.
+ rc = IMG_COMP_SET_PARAM(p_comp, QIMG_PARAM_MODE, (void *)&comp_mode);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_DEINIT;
+ }
+
+ // Get the mode to make sure whether the component is really running
+ // in the mode what we set.
+ rc = IMG_COMP_GET_PARAM(p_comp, QIMG_PARAM_MODE,
+ (void *)&lib2d_obj->comp_mode);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto COMP_DEINIT;
+ }
+
+ if (comp_mode != lib2d_obj->comp_mode) {
+ LOGD("Component is running in %d mode",
+ lib2d_obj->comp_mode);
+ }
+
+ *my_obj = (void *)lib2d_obj;
+
+ return MM_LIB2D_SUCCESS;
+
+COMP_DEINIT :
+ rc = IMG_COMP_DEINIT(p_comp);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+COMP_UNLOAD :
+ rc = IMG_COMP_UNLOAD(p_core_ops);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+FREE_LIB2D_OBJ :
+  if (lib2d_obj->img_lib.ptr)
+    dlclose(lib2d_obj->img_lib.ptr);
+  free(lib2d_obj);
+  return MM_LIB2D_ERR_GENERAL;
+}
+
+/**
+ * Function: mm_lib2d_deinit
+ *
+ * Description: De-Initialization function for Lib2D
+ *
+ * Input parameters:
+ *   lib2d_obj_handle - handle to the lib2d object
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_deinit(void *lib2d_obj_handle)
+{
+ mm_lib2d_obj *lib2d_obj = (mm_lib2d_obj *)lib2d_obj_handle;
+ int rc = IMG_SUCCESS;
+ img_core_ops_t *p_core_ops = &lib2d_obj->core_ops;
+ img_component_ops_t *p_comp = &lib2d_obj->comp;
+
+ rc = IMG_COMP_DEINIT(p_comp);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+ rc = IMG_COMP_UNLOAD(p_core_ops);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+ dlclose(lib2d_obj->img_lib.ptr);
+ free(lib2d_obj);
+
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: mm_lib2d_start_job
+ *
+ * Description: Start executing the job
+ *
+ * Input parameters:
+ *   lib2d_obj_handle - handle to the lib2d object
+ * src_buffer - pointer to the source buffer
+ * dst_buffer - pointer to the destination buffer
+ * jobid - job id of this request
+ *   userdata - userdata that will be passed to the callback function
+ * cb - callback function that will be called on completion of this job
+ * rotation - rotation to be applied
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_MEMORY
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_start_job(void *lib2d_obj_handle,
+ mm_lib2d_buffer* src_buffer, mm_lib2d_buffer* dst_buffer,
+ int jobid, void *userdata, lib2d_client_cb cb, uint32_t rotation)
+{
+ mm_lib2d_obj *lib2d_obj = (mm_lib2d_obj *)lib2d_obj_handle;
+ int rc = IMG_SUCCESS;
+ img_component_ops_t *p_comp = &lib2d_obj->comp;
+
+ img_frame_t *p_in_frame = malloc(sizeof(img_frame_t));
+ if (p_in_frame == NULL) {
+ return MM_LIB2D_ERR_MEMORY;
+ }
+
+ img_frame_t *p_out_frame = malloc(sizeof(img_frame_t));
+ if (p_out_frame == NULL) {
+ free(p_in_frame);
+ return MM_LIB2D_ERR_MEMORY;
+ }
+
+ img_meta_t *p_meta = malloc(sizeof(img_meta_t));
+ if (p_meta == NULL) {
+ free(p_in_frame);
+ free(p_out_frame);
+ return MM_LIB2D_ERR_MEMORY;
+ }
+
+ lib2d_job_private_info *p_job_info = malloc(sizeof(lib2d_job_private_info));
+  if (p_job_info == NULL) {
+ free(p_in_frame);
+ free(p_out_frame);
+ free(p_meta);
+ return MM_LIB2D_ERR_MEMORY;
+ }
+
+ memset(p_in_frame, 0x0, sizeof(img_frame_t));
+ memset(p_out_frame, 0x0, sizeof(img_frame_t));
+ memset(p_meta, 0x0, sizeof(img_meta_t));
+ memset(p_job_info, 0x0, sizeof(lib2d_job_private_info));
+
+ // Fill up job info private data structure that can be used in callback to
+ // inform back to the client.
+ p_job_info->jobid = jobid;
+ p_job_info->userdata = userdata;
+ p_job_info->lib2d_client_cb = cb;
+
+ p_in_frame->private_data = (void *)p_job_info;
+ p_out_frame->private_data = (void *)p_job_info;
+
+  // convert the input info into component understandable data structures
+
+ // Prepare Input, output frames
+ lib2d_fill_img_frame(p_in_frame, src_buffer, jobid);
+ lib2d_fill_img_frame(p_out_frame, dst_buffer, jobid);
+
+ p_meta->frame_id = jobid;
+ p_meta->rotation.device_rotation = (int32_t)rotation;
+ p_meta->rotation.frame_rotation = (int32_t)rotation;
+
+ // call set_param to set the source, destination formats
+
+ rc = IMG_COMP_Q_BUF(p_comp, p_in_frame, IMG_IN);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto ERROR;
+ }
+
+ rc = IMG_COMP_Q_BUF(p_comp, p_out_frame, IMG_OUT);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto ERROR;
+ }
+
+ rc = IMG_COMP_Q_META_BUF(p_comp, p_meta);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto ERROR;
+ }
+
+ rc = IMG_COMP_START(p_comp, NULL);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto ERROR;
+ }
+
+ if (lib2d_obj->lib2d_mode == MM_LIB2D_SYNC_MODE) {
+ if (lib2d_obj->comp_mode == IMG_ASYNC_MODE) {
+ LOGD("before wait rc %d", rc);
+ rc = lib2d_obj->img_lib.img_wait_for_completion(&lib2d_obj->cond,
+ &lib2d_obj->mutex, 10000);
+ if (rc != IMG_SUCCESS) {
+ LOGE("rc %d", rc);
+ goto ERROR;
+ }
+ }
+ }
+
+ rc = IMG_COMP_ABORT(p_comp, NULL);
+ if (IMG_ERROR(rc)) {
+ LOGE("comp abort failed %d", rc);
+    return MM_LIB2D_ERR_GENERAL;
+ }
+
+ return MM_LIB2D_SUCCESS;
+ERROR:
+ free(p_in_frame);
+ free(p_out_frame);
+ free(p_meta);
+ free(p_job_info);
+
+ return MM_LIB2D_ERR_GENERAL;
+}
+
diff --git a/camera/QCamera2/stack/mm-lib2d-interface/test/Android.mk b/camera/QCamera2/stack/mm-lib2d-interface/test/Android.mk
new file mode 100644
index 0000000..63cd5b7
--- /dev/null
+++ b/camera/QCamera2/stack/mm-lib2d-interface/test/Android.mk
@@ -0,0 +1,37 @@
+#lib2d sample test
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+MM_LIB2D_TEST_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../common.mk
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_LIB2D_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+IMGLIB_HEADER_PATH := $(TARGET_OUT_INTERMEDIATES)/include/mm-camera/imglib
+
+LOCAL_C_INCLUDES += \
+ $(IMGLIB_HEADER_PATH) \
+ $(LOCAL_PATH)/../../common \
+ $(LOCAL_PATH)/../inc
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_lib2d_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE := mm-lib2d-interface-test
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmlib2d_interface
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c b/camera/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c
new file mode 100644
index 0000000..908a4a6
--- /dev/null
+++ b/camera/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c
@@ -0,0 +1,543 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <dlfcn.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <sys/time.h>
+
+// Camera dependencies
+#include "img_buffer.h"
+#include "mm_lib2d.h"
+
+
+#define ENABLE_OUTPUT_DUMP 1
+#define ALIGN4K 4096
+#define ALIGN(a, b) (((a) + (b) - 1) & ~((b) - 1))
+
+
+/** DUMP_TO_FILE:
+ * @filename: file name
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+ size_t rc = 0; \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr, 1, len, fp); \
+ printf(" ] written size %zu \n", __LINE__, len); \
+ fclose(fp); \
+ } else { \
+ printf(" ] open %s failed \n", __LINE__, filename); \
+ } \
+})
+
+/** DUMP_TO_FILE2:
+ * @filename: file name
+ * @p_addr: address of the buffer
+ * @len: buffer length
+ *
+ * dump the image to the file if the memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+ size_t rc = 0; \
+ FILE *fp = fopen(filename, "w+"); \
+ if (fp) { \
+ rc = fwrite(p_addr1, 1, len1, fp); \
+ rc = fwrite(p_addr2, 1, len2, fp); \
+ printf(" ] written %zu %zu \n", __LINE__, len1, len2); \
+ fclose(fp); \
+ } else { \
+ printf(" ] open %s failed \n", __LINE__, filename); \
+ } \
+})
+
+/** img_lib_buffert
+ * @ptr: handle to the imglib library
+ * @img_buffer_get: function pointer to img_buffer_get
+ * @img_buffer_release: function pointer to img_buffer_release
+ * @img_buffer_cacheops: function pointer to img_buffer_cacheops
+**/
+typedef struct {
+ void *ptr;
+ int (*img_buffer_get)(img_buf_type_t type, int heapid, int8_t cached, int length,
+ img_mem_handle_t *p_handle);
+ int (*img_buffer_release)(img_mem_handle_t *p_handle);
+ int (*img_buffer_cacheops)(img_mem_handle_t *p_handle, img_cache_ops_t ops,
+ img_mem_alloc_type_t mem_alloc_type);
+} img_lib_buffert;
+
+/** input_yuv_data
+ * @filename: input test filename
+ * @format: format of the input yuv frame
+ *   @wdith: width of the input yuv frame
+ * @height: height of the input yuv frame
+ * @stride: stride of the input yuv frame
+ * @offset: offset to the yuv data in the input file
+**/
+typedef struct input_yuv_data_t {
+ char filename[512];
+ cam_format_t format;
+ int32_t wdith;
+ int32_t height;
+ int32_t stride;
+ int32_t offset;
+} input_yuv_data;
+
+input_yuv_data input_nv21[] = {
+ {"sample0_768x512.yuv", CAM_FORMAT_YUV_420_NV21, 768, 512, 768, 0},
+ {"sample1_3200x2400.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+ {"sample2_1920x1080.yuv", CAM_FORMAT_YUV_420_NV21, 1920, 1080, 1920, 0},
+ {"sample3_3200x2400.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+ {"sample4_4208x3120.yuv", CAM_FORMAT_YUV_420_NV21, 4208, 3120, 4208, 0},
+ {"sample5_1984x2592.yuv", CAM_FORMAT_YUV_420_NV21, 1984, 2592, 1984, 0},
+ {"sample6_4000_3000.yuv", CAM_FORMAT_YUV_420_NV21, 4000, 3000, 4000, 0},
+ {"sample7_3200_2400.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+ {"sample8_3008_4000.yuv", CAM_FORMAT_YUV_420_NV21, 3008, 4000, 3008, 0},
+ {"sample9_5312x2988.yuv", CAM_FORMAT_YUV_420_NV21, 5312, 2988, 5312, 0},
+ {"sample10_4128x3096.yuv", CAM_FORMAT_YUV_420_NV21, 4128, 3096, 4128, 0},
+ {"sample11_4208x3120.yuv", CAM_FORMAT_YUV_420_NV21, 4208, 3120, 4208, 0},
+ {"sample12_3200x2400.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+ {"sample13_width_1080_height_1440_stride_1088.yuv", CAM_FORMAT_YUV_420_NV21, 1080, 1440, 1088, 0},
+ {"sample14_width_1080_height_1920_stride_1088.yuv", CAM_FORMAT_YUV_420_NV21, 1080, 1920, 1088, 0},
+ {"sample15_width_1944_height_2592_stride_1984.yuv", CAM_FORMAT_YUV_420_NV21, 1944, 2592, 1984, 0},
+ {"sample16_width_3000_height_4000_stride_3008.yuv", CAM_FORMAT_YUV_420_NV21, 3000, 4000, 3008, 0},
+ {"sample17_width_3120_height_4208_stride_3136.yuv", CAM_FORMAT_YUV_420_NV21, 3120, 4208, 3136, 0},
+ {"sample18_width_3200_height_2400_stride_3200.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+ {"sample19_width_1944_height_2592_stride_1984.yuv", CAM_FORMAT_YUV_420_NV21, 1944, 2592, 1984, 0},
+};
+
+// assuming buffer format is always ARGB
+void lib2d_dump_tga(void *addr, cam_format_t format, int width,
+ int height, int stride, char *fname)
+{
+ int i, j;
+ FILE *f;
+ unsigned char *pb = (unsigned char *)addr;
+ uint32_t *pd = (uint32_t *)addr;
+ int bpp = 32;
+
+ f = fopen(fname, "wb");
+ if (f) {
+ // header
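+    // Assumed layout: an 18-byte uncompressed true-color TGA header with
+    // id length 0, no color map, image type 2, zeroed color-map spec and
+    // origin, 16-bit little-endian width/height, then bits-per-pixel and
+    // the image descriptor byte.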
+ fprintf(f, "%c%c%c%c", 0, 0, 2, 0);
+ fprintf(f, "%c%c%c%c", 0, 0, 0, 0);
+ fprintf(f, "%c%c%c%c", 0, 0, 0, 0);
+ fprintf(f, "%c%c%c%c", width & 0xff, width >> 8, height & 0xff, height >> 8);
+ fprintf(f, "%c%c", bpp, 32);
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ fprintf(f, "%c%c%c%c",
+ pd[(i*stride>>2)+j] & 0xff, // b
+ (pd[(i*stride>>2)+j] >> 8) & 0xff, // g
+ (pd[(i*stride>>2)+j] >> 16) & 0xff, // r
+ (pd[(i*stride>>2)+j] >> 24) & 0xff); // a
+ }
+ }
+ fclose(f);
+ }
+}
+
+/**
+ * Function: lib2d_test_client_cb
+ *
+ * Description: Callback that is called on completion of requested job.
+ *
+ * Input parameters:
+ * userdata - App userdata
+ *   jobid - job id that has finished execution
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_client_cb(void *userdata, int jobid)
+{
+ printf("%s %d, jobid=%d \n", __LINE__, jobid);
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: lib2d_test_load_input_yuv_data
+ *
+ * Description: Loads yuv data from input file.
+ *
+ * Input parameters:
+ * fileName - input yuv filename
+ * offset - offset to the yuv data in the input file
+ * y_size - y plane size in input yuv file
+ * crcb_size - crcb plane size in input yuv file
+ * crcb_offset - crcb offset in the memory at
+ * which crcb data need to be loaded
+ * addr - y plane memory address where y plane
+ * data need to be loaded.
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_load_input_yuv_data(char *fileName, int offset,
+ int32_t y_size, int32_t crcb_size, int32_t crcb_offset,
+ void *addr)
+{
+ size_t i;
+ FILE *fp = 0;
+ void *y_ptr = addr;
+ void *crcb_ptr = (uint8_t *)addr + crcb_offset;
+
+ printf("y_ptr=%p, crcb_ptr=%p \n", y_ptr, crcb_ptr);
+
+ fp = fopen(fileName, "rb");
+ if(fp) {
+ if(offset) {
+ fseek(fp, offset, SEEK_SET);
+ }
+ i = fread(y_ptr, 1, y_size, fp);
+ i = fread(crcb_ptr, 1, crcb_size, fp);
+
+ fclose( fp );
+ } else {
+ printf("failed to open file %s \n", fileName);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: lib2d_test_load_input_yuv_data_linebyline
+ *
+ * Description: Loads yuv data from input file.
+ *
+ * Input parameters:
+ * fileName - input yuv filename
+ * offset - offset to the yuv data in the input file
+ * input_yuv_stride - y plane stride in input yuv file
+ * y_plane_stride - y plane stride in buffer memory
+ * height - height of yuv image
+ * crcb_offset - crcb offset in the memory at
+ * which crcb data need to be loaded
+ * addr - y plane memory address where y plane
+ * data need to be loaded.
+ *
+ * Return values:
+ * MM_LIB2D_SUCCESS
+ * MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_load_input_yuv_data_linebyline(char *fileName,
+ int offset, int32_t input_yuv_stride, int32_t y_plane_stride,
+ int32_t height, int32_t crcb_offset, void *addr)
+{
+ size_t i;
+ FILE *fp = 0;
+ void *y_ptr = addr;
+ void *crcb_ptr = (uint8_t *)addr + crcb_offset;
+
+ printf("y_ptr=%p, crcb_ptr=%p \n", y_ptr, crcb_ptr);
+
+ fp = fopen(fileName, "rb");
+ if(fp) {
+ if(offset) {
+ fseek(fp, offset, SEEK_SET);
+ }
+ if (input_yuv_stride == y_plane_stride) {
+ //load y plane
+ i = fread(y_ptr, 1, (input_yuv_stride * height), fp);
+ // load UV plane
+ i = fread(crcb_ptr, 1, (input_yuv_stride * height / 2), fp);
+ } else {
+ int line = 0;
+ // load Y plane
+      for (line = 0; line < height; line++) {
+        i = fread(y_ptr, 1, input_yuv_stride, fp);
+        y_ptr = (void *)((uint8_t *)y_ptr + y_plane_stride);
+      }
+      // load UV plane: 4:2:0 chroma has half as many rows as luma
+      for (line = 0; line < height / 2; line++) {
+ i = fread(crcb_ptr, 1, input_yuv_stride, fp);
+ crcb_ptr = (void *)((uint8_t *)crcb_ptr + y_plane_stride);
+ }
+ }
+
+ fclose( fp );
+ } else {
+ printf("failed to open file %s \n", fileName);
+ return MM_LIB2D_ERR_GENERAL;
+ }
+
+ return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: main
+ *
+ * Description: main function for execution
+ *
+ * Input parameters:
+ * argc - no.of input arguments
+ * argv - list of arguments
+ *
+ * Return values:
+ * 0 on success
+ * -1 on failure
+ *
+ * Notes: none
+ **/
+int main(int32_t argc, const char * argv[])
+{
+ void *lib2d_handle = NULL;
+ lib2d_error lib2d_err = MM_LIB2D_SUCCESS;
+ mm_lib2d_buffer src_buffer = {0};
+ mm_lib2d_buffer dst_buffer = {0};
+ int8_t ret = IMG_SUCCESS;
+ int32_t width = 0;
+ int32_t height = 0;
+ int32_t input_yuv_stride = 0;
+ int32_t stride = 0;
+ int32_t y_plane_stride = 0;
+ int32_t crcb_plane_stride = 0;
+ int32_t y_plane_size = 0;
+ int32_t y_plane_size_align = 0;
+ int32_t crcb_plane_size = 0;
+ int32_t yuv_size = 0;
+ int32_t rgb_size = 0;
+ img_mem_handle_t m_yuv_memHandle = { 0 };
+ img_mem_handle_t m_rgb_memHandle = { 0 };
+ char filename_in[512] = { 0 };
+ char filename_out[512] = { 0 };
+ char filename_raw[512] = { 0 };
+ int32_t offset = 0;
+ unsigned int total_tests = 1;
+ cam_format_t format = CAM_FORMAT_YUV_420_NV21;
+ unsigned int index;
+ const char *filename;
+
+ // Open Imglib library and get the function pointers for
+ // buffer allocation, free, cacheops
+ img_lib_buffert img_lib;
+ img_lib.ptr = dlopen("libmmcamera_imglib.so", RTLD_NOW);
+ if (!img_lib.ptr) {
+ printf("%s ERROR: couldn't dlopen libmmcamera_imglib.so: %s",
+ dlerror());
+ return -1;
+ }
+
+ /* Get function pointer for functions to allocate ion memory */
+ *(void **)&img_lib.img_buffer_get =
+ dlsym(img_lib.ptr, "img_buffer_get");
+ *(void **)&img_lib.img_buffer_release =
+ dlsym(img_lib.ptr, "img_buffer_release");
+ *(void **)&img_lib.img_buffer_cacheops =
+ dlsym(img_lib.ptr, "img_buffer_cacheops");
+
+ /* Validate function pointers */
+ if ((img_lib.img_buffer_get == NULL) ||
+ (img_lib.img_buffer_release == NULL) ||
+ (img_lib.img_buffer_cacheops == NULL)) {
+ printf(" ERROR mapping symbols from libmmcamera_imglib.so");
+ dlclose(img_lib.ptr);
+ return -1;
+ }
+
+ lib2d_err = mm_lib2d_init(MM_LIB2D_SYNC_MODE, CAM_FORMAT_YUV_420_NV21,
+ CAM_FORMAT_8888_ARGB, &lib2d_handle);
+ if ((lib2d_err != MM_LIB2D_SUCCESS) || (lib2d_handle == NULL)) {
+ return -1;
+ }
+
+ bool run_default = FALSE;
+
+ if ( argc == 7) {
+ filename = argv[1];
+ width = (uint32_t)atoi(argv[2]);
+ height = (uint32_t)atoi(argv[3]);
+ input_yuv_stride = (uint32_t)atoi(argv[4]);
+ offset = (uint32_t)atoi(argv[5]);
+ format = (uint32_t)atoi(argv[6]);
+ run_default = TRUE;
+ printf("Running user provided conversion \n");
+ }
+ else {
+ total_tests = sizeof(input_nv21)/sizeof(input_yuv_data);
+ printf("usage: <binary> <filname> <width> <height> "
+ "<stride> <offset> <format> \n");
+ }
+
+ for (index = 0; index < total_tests; index++)
+ {
+ if(run_default == FALSE) {
+ filename = input_nv21[index].filename;
+ width = input_nv21[index].wdith;
+ height = input_nv21[index].height;
+ input_yuv_stride = input_nv21[index].stride;
+ offset = input_nv21[index].offset;
+ format = input_nv21[index].format;
+ }
+
+ snprintf(filename_in, 512, "/data/lib2d/input/%s", filename);
+ snprintf(filename_out, 512, "/data/lib2d/output/%s.tga", filename);
+ snprintf(filename_raw, 512, "/data/lib2d/output/%s.rgba", filename);
+
+ printf("-----------------Running test=%d/%d------------------------- \n",
+ index+1, total_tests);
+ printf("filename=%s, full path=%s, width=%d, height=%d, stride=%d \n",
+      filename, filename_in, width, height, input_yuv_stride);
+
+ // Allocate NV12 buffer
+ y_plane_stride = ALIGN(width, 32);
+ y_plane_size = y_plane_stride * height;
+ y_plane_size_align = ALIGN(y_plane_size, ALIGN4K);
+ crcb_plane_stride = y_plane_stride;
+ crcb_plane_size = crcb_plane_stride * height / 2;
+ yuv_size = y_plane_size_align + crcb_plane_size;
+ ret = img_lib.img_buffer_get(IMG_BUFFER_ION_IOMMU, -1, TRUE,
+ yuv_size, &m_yuv_memHandle);
+ if (ret != IMG_SUCCESS) {
+ printf(" ] Error, img buf get failed \n");
+ goto deinit;
+ }
+
+ printf("%s %d yuv buffer properties : w=%d, h=%d, y_stride=%d, "
+ "crcb_stride=%d, y_size=%d, crcb_size=%d, yuv_size=%d, "
+ "crcb_offset=%d \n",
+      __func__, __LINE__,
+ width, height, y_plane_stride, crcb_plane_stride, y_plane_size,
+ crcb_plane_size, yuv_size, y_plane_size_align);
+ printf("%s %d yuv buffer properties : fd=%d, ptr=%p, size=%d \n",
+      __func__, __LINE__, m_yuv_memHandle.fd, m_yuv_memHandle.vaddr,
+ m_yuv_memHandle.length);
+
+ // Allocate ARGB buffer
+ stride = width * 4;
+ stride = ALIGN(stride, 32);
+ rgb_size = stride * height;
+ ret = img_lib.img_buffer_get(IMG_BUFFER_ION_IOMMU, -1, TRUE,
+ rgb_size, &m_rgb_memHandle);
+ if (ret != IMG_SUCCESS) {
+ printf(" ] Error, img buf get failed");
+ img_lib.img_buffer_release(&m_yuv_memHandle);
+ goto deinit;
+ }
+
+ printf("%s %d rgb buffer properties : w=%d, h=%d, stride=%d, size=%d \n",
+      __func__, __LINE__, width, height, stride, rgb_size);
+ printf("%s %d rgb buffer properties : fd=%d, ptr=%p, size=%d \n",
+      __func__, __LINE__, m_rgb_memHandle.fd, m_rgb_memHandle.vaddr,
+ m_rgb_memHandle.length);
+
+#if 0
+ lib2d_err = lib2d_test_load_input_yuv_data(filename_in, offset,
+ (input_yuv_stride * height), (input_yuv_stride * height / 2), y_plane_size_align,
+ m_yuv_memHandle.vaddr);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ printf(" ] Error loading the input buffer \n");
+ goto release;
+ }
+#else
+ lib2d_err = lib2d_test_load_input_yuv_data_linebyline(filename_in, offset,
+ input_yuv_stride, y_plane_stride,height, y_plane_size_align,
+ m_yuv_memHandle.vaddr);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ printf(" ] Error loading the input buffer \n");
+ goto release;
+ }
+#endif
+ // Setup source buffer
+ src_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+ src_buffer.yuv_buffer.fd = m_yuv_memHandle.fd;
+ src_buffer.yuv_buffer.format = format;
+ src_buffer.yuv_buffer.width = width;
+ src_buffer.yuv_buffer.height = height;
+ src_buffer.yuv_buffer.plane0 = m_yuv_memHandle.vaddr;
+ src_buffer.yuv_buffer.stride0 = y_plane_stride;
+ src_buffer.yuv_buffer.plane1 = (int8_t *)m_yuv_memHandle.vaddr +
+ y_plane_size_align;
+ src_buffer.yuv_buffer.stride1 = crcb_plane_stride;
+
+ // Setup dst buffer
+ dst_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_RGB;
+ dst_buffer.rgb_buffer.fd = m_rgb_memHandle.fd;
+ dst_buffer.rgb_buffer.format = CAM_FORMAT_8888_ARGB;
+ dst_buffer.rgb_buffer.width = width;
+ dst_buffer.rgb_buffer.height = height;
+ dst_buffer.rgb_buffer.buffer = m_rgb_memHandle.vaddr;
+ dst_buffer.rgb_buffer.stride = stride;
+
+ img_lib.img_buffer_cacheops(&m_yuv_memHandle,
+ IMG_CACHE_CLEAN_INV, IMG_INTERNAL);
+
+ lib2d_err = mm_lib2d_start_job(lib2d_handle, &src_buffer, &dst_buffer,
+ index, NULL, lib2d_test_client_cb, 0);
+ if (lib2d_err != MM_LIB2D_SUCCESS) {
+ printf(" ] Error in mm_lib2d_start_job \n");
+ goto release;
+ }
+
+ img_lib.img_buffer_cacheops(&m_rgb_memHandle,
+ IMG_CACHE_CLEAN_INV, IMG_INTERNAL);
+
+#ifdef ENABLE_OUTPUT_DUMP
+ // Dump output files
+ // snprintf(filename_in, 512, "/data/lib2d/output/%s", filename);
+ // DUMP_TO_FILE2(filename_in, src_buffer.yuv_buffer.plane0, y_plane_size, src_buffer.yuv_buffer.plane1, crcb_plane_size);
+ // DUMP_TO_FILE(filename_raw, dst_buffer.rgb_buffer.buffer, rgb_size);
+ printf("Dumping output file %s \n", filename_out);
+ lib2d_dump_tga(dst_buffer.rgb_buffer.buffer, 1,
+ width, height, stride, filename_out);
+#endif
+
+ img_lib.img_buffer_release(&m_rgb_memHandle);
+ img_lib.img_buffer_release(&m_yuv_memHandle);
+ }
+
+ mm_lib2d_deinit(lib2d_handle);
+
+ return 0;
+
+release:
+ img_lib.img_buffer_release(&m_rgb_memHandle);
+ img_lib.img_buffer_release(&m_yuv_memHandle);
+deinit:
+ mm_lib2d_deinit(lib2d_handle);
+ printf("%s %d some error happened, tests completed = %d/%d \n",
+    __func__, __LINE__, index, total_tests);
+ return -1;
+}
+
+