fix laneLine, jerkLeadFactor, OS04C10: use IFE downscaler for road cameras (#35023)
parent 0d40043638
commit df5d58217a
@@ -39,7 +39,6 @@ def max_abs(a, b):
 class LanePlanner:
   def __init__(self):
-    self.ll_t = np.zeros((TRAJECTORY_SIZE,))
     self.ll_x = np.zeros((TRAJECTORY_SIZE,))
     self.lll_y = np.zeros((TRAJECTORY_SIZE,))
     self.rll_y = np.zeros((TRAJECTORY_SIZE,))
@@ -78,8 +77,7 @@ class LanePlanner:
     lane_lines = md.laneLines
     edges = md.roadEdges

-    if len(lane_lines) >= 4 and len(lane_lines[0].t) == TRAJECTORY_SIZE:
-      self.ll_t = (np.array(lane_lines[1].t) + np.array(lane_lines[2].t))/2
+    if len(lane_lines) >= 4 and len(lane_lines[0].x) == TRAJECTORY_SIZE:
       # left and right ll x is the same
       self.ll_x = lane_lines[1].x
       self.lll_y = np.array(lane_lines[1].y)
@@ -233,15 +231,8 @@ class LanePlanner:
     laneline_active = False
     if self.lanefull_mode and self.d_prob > 0.3:
       laneline_active = True
-      use_dist_mode = True  ## no matter how I think about it... it seems to be the same method...
-      if use_dist_mode:
-        lane_path_y_interp = np.interp(path_xyz[:,0] + v_ego * adjustLaneTime*0.01, self.ll_x, lane_path_y)
-        path_xyz[:,1] = self.d_prob * lane_path_y_interp + (1.0 - self.d_prob) * path_xyz[:,1]
-      else:
-        safe_idxs = np.isfinite(self.ll_t)
-        if safe_idxs[0]:
-          lane_path_y_interp = np.interp(path_t * (1.0 + adjustLaneTime*0.01), self.ll_t[safe_idxs], lane_path_y[safe_idxs])
-          path_xyz[:,1] = self.d_prob * lane_path_y_interp + (1.0 - self.d_prob) * path_xyz[:,1]
+      lane_path_y_interp = np.interp(path_xyz[:,0] + v_ego * adjustLaneTime*0.01, self.ll_x, lane_path_y)
+      path_xyz[:,1] = self.d_prob * lane_path_y_interp + (1.0 - self.d_prob) * path_xyz[:,1]


     path_xyz[:, 1] += (CAMERA_OFFSET + self.lane_offset_filtered.x)
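Note: the surviving laneline blend is a single distance-domain interpolation. Lane-center lateral offsets are sampled at each path point's longitudinal distance, shifted forward by v_ego * adjustLaneTime * 0.01 of look-ahead, then mixed with the model path by d_prob. A minimal numpy sketch of that step, with toy values standing in for the real TRAJECTORY_SIZE, d_prob, adjustLaneTime and arrays:

import numpy as np

TRAJECTORY_SIZE = 33   # stand-in for the real constant
d_prob = 0.8           # lane-line confidence, 0..1
adjustLaneTime = 50    # tuning value, scaled by 0.01 below
v_ego = 20.0           # m/s

ll_x = np.linspace(0, 100, TRAJECTORY_SIZE)             # distances the lane lines are sampled at
lane_path_y = np.sin(ll_x / 30.0)                       # lateral center of the detected lane (toy data)
path_xyz = np.zeros((TRAJECTORY_SIZE, 3))
path_xyz[:, 0] = np.linspace(0, 100, TRAJECTORY_SIZE)   # model path longitudinal distances

# look slightly ahead along the path, then pull the path toward the lane center by d_prob
lane_path_y_interp = np.interp(path_xyz[:, 0] + v_ego * adjustLaneTime * 0.01, ll_x, lane_path_y)
path_xyz[:, 1] = d_prob * lane_path_y_interp + (1.0 - d_prob) * path_xyz[:, 1]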
@ -371,13 +371,13 @@ class LongitudinalMpc:
|
|||||||
j_lead = np.clip(j_lead, -2., 2.)
|
j_lead = np.clip(j_lead, -2., 2.)
|
||||||
|
|
||||||
j_lead *= carrot.j_lead_factor
|
j_lead *= carrot.j_lead_factor
|
||||||
if j_lead > 0 and a_lead < 0 and (v_lead - v_ego) > 0 and x_lead > self.desired_distance:
|
#if j_lead > 0 and a_lead < 0 and (v_lead - v_ego) > 0 and x_lead > self.desired_distance:
|
||||||
|
if j_lead > 0 and a_lead < 0 and x_lead > self.desired_distance:
|
||||||
a_lead += min(j_lead, 0.5)
|
a_lead += min(j_lead, 0.5)
|
||||||
a_lead = min(a_lead, 0.0)
|
a_lead = min(a_lead, 0.0)
|
||||||
|
|
||||||
if j_lead < 0 and a_lead < -0.5:
|
if j_lead < 0 and a_lead < -0.5:
|
||||||
drop = min(abs(j_lead), 0.5)
|
a_lead -= min(abs(j_lead)*1.5, 0.8)
|
||||||
a_lead -= drop
|
|
||||||
|
|
||||||
lead_xv = self.extrapolate_lead(x_lead, v_lead, a_lead, a_lead_tau)
|
lead_xv = self.extrapolate_lead(x_lead, v_lead, a_lead, a_lead_tau)
|
||||||
#lead_xv = self.extrapolate_lead_with_j(x_lead, v_lead, a_lead, j_lead, a_lead_tau)
|
#lead_xv = self.extrapolate_lead_with_j(x_lead, v_lead, a_lead, j_lead, a_lead_tau)
|
||||||
|
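Note: the lead-jerk handling now softens the assumed lead braking whenever jerk has turned positive while the lead is still decelerating and the gap exceeds the desired distance (the relative-speed condition is the part commented out), and deepens it when jerk and acceleration are both strongly negative. A standalone sketch of that clamping arithmetic; the default j_lead_factor below is a placeholder, not the tuned carrot value:

import numpy as np

def shape_lead_accel(a_lead, j_lead, x_lead, desired_distance, j_lead_factor=1.0):
  # mirrors the branch logic above, outside the MPC
  j_lead = np.clip(j_lead, -2., 2.) * j_lead_factor
  if j_lead > 0 and a_lead < 0 and x_lead > desired_distance:
    a_lead = min(a_lead + min(j_lead, 0.5), 0.0)   # ease the assumed braking, but never above 0
  if j_lead < 0 and a_lead < -0.5:
    a_lead -= min(abs(j_lead) * 1.5, 0.8)          # assume the braking is about to get harder
  return a_lead

print(shape_lead_accel(a_lead=-1.0, j_lead=0.4, x_lead=40.0, desired_distance=30.0))   # -0.6
print(shape_lead_accel(a_lead=-1.0, j_lead=-1.0, x_lead=40.0, desired_distance=30.0))  # -1.8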
@@ -26,9 +26,6 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *
     LOGD("allocated %d CL buffers", frame_buf_count);
   }

-  out_img_width = sensor->frame_width;
-  out_img_height = sensor->hdr_offset > 0 ? (sensor->frame_height - sensor->hdr_offset) / 2 : sensor->frame_height;
-
   // the encoder HW tells us the size it wants after setting it up.
   // TODO: VENUS_BUFFER_SIZE should give the size, but it's too small. dependent on encoder settings?
   size_t nv12_size = (out_img_width <= 1344 ? 2900 : 2346)*cam->stride;
@@ -32,7 +32,7 @@ public:
   VisionBuf *cur_yuv_buf;
   VisionBuf *cur_camera_buf;
   std::unique_ptr<VisionBuf[]> camera_bufs_raw;
-  int out_img_width, out_img_height;
+  uint32_t out_img_width, out_img_height;

   CameraBuf() = default;
   ~CameraBuf();
@@ -73,7 +73,7 @@ void CameraState::init(VisionIpcServer *v, cl_device_id device_id, cl_context ct

   if (!camera.enabled) return;

-  fl_pix = camera.cc.focal_len / camera.sensor->pixel_size_mm;
+  fl_pix = camera.cc.focal_len / camera.sensor->pixel_size_mm / camera.sensor->out_scale;
   set_exposure_rect();

   dc_gain_weight = camera.sensor->dc_gain_min_weight;
@@ -107,10 +107,10 @@ void CameraState::set_exposure_rect() {
   float fl_ref = ae_target.second;

   ae_xywh = (Rect){
-    std::max(0, camera.buf.out_img_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
-    std::max(0, camera.buf.out_img_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
-    std::min((int)(fl_pix / fl_ref * xywh_ref.w), camera.buf.out_img_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
-    std::min((int)(fl_pix / fl_ref * xywh_ref.h), camera.buf.out_img_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
+    std::max(0, (int)camera.buf.out_img_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
+    std::max(0, (int)camera.buf.out_img_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
+    std::min((int)(fl_pix / fl_ref * xywh_ref.w), (int)camera.buf.out_img_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
+    std::min((int)(fl_pix / fl_ref * xywh_ref.h), (int)camera.buf.out_img_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
   };
 }

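Note on the fl_pix change a few hunks up: the auto-exposure rectangle is expressed in output-image pixels, and with the IFE downscaler active those are out_scale times coarser than sensor pixels, so the focal length in pixels shrinks by the same factor; the added (int) casts here only compensate for out_img_width/out_img_height becoming unsigned. Worked numbers as a sketch; the focal length is illustrative, not the real calibration:

out_scale = 2
frame_width, frame_height = 2688, 1520
out_img_width, out_img_height = frame_width // out_scale, frame_height // out_scale

focal_len_mm = 1.71      # illustrative only
pixel_size_mm = 0.002    # native pixel size set by ife_downscale_configure()

fl_pix_full = focal_len_mm / pixel_size_mm   # focal length in full-resolution pixels
fl_pix = fl_pix_full / out_scale             # same quantity in downscaled output pixels
print(fl_pix_full, fl_pix)                   # 855.0 427.5

# the exposure rect is then centered in the (smaller) output image
print(out_img_width // 2, out_img_height // 2)   # 672 380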
@@ -105,7 +105,7 @@ int build_update(uint8_t *dst, const CameraConfig cam, const SensorInfo *s, std:
 }


-int build_initial_config(uint8_t *dst, const CameraConfig cam, const SensorInfo *s, std::vector<uint32_t> &patches) {
+int build_initial_config(uint8_t *dst, const CameraConfig cam, const SensorInfo *s, std::vector<uint32_t> &patches, uint32_t out_width, uint32_t out_height) {
   uint8_t *start = dst;

   // start with the every frame config
@@ -185,12 +185,12 @@ int build_initial_config(uint8_t *dst, const CameraConfig cam, const SensorInfo
   // output size/scaling
   dst += write_cont(dst, 0xa3c, {
     0x00000003,
-    ((s->frame_width - 1) << 16) | (s->frame_width - 1),
+    ((out_width - 1) << 16) | (s->frame_width - 1),
     0x30036666,
     0x00000000,
     0x00000000,
     s->frame_width - 1,
-    ((s->frame_height - 1) << 16) | (s->frame_height - 1),
+    ((out_height - 1) << 16) | (s->frame_height - 1),
     0x30036666,
     0x00000000,
     0x00000000,
@@ -198,12 +198,12 @@ int build_initial_config(uint8_t *dst, const CameraConfig cam, const SensorInfo
   });
   dst += write_cont(dst, 0xa68, {
     0x00000003,
-    ((s->frame_width/2 - 1) << 16) | (s->frame_width - 1),
+    ((out_width / 2 - 1) << 16) | (s->frame_width - 1),
     0x3006cccc,
     0x00000000,
     0x00000000,
     s->frame_width - 1,
-    ((s->frame_height/2 - 1) << 16) | (s->frame_height - 1),
+    ((out_height / 2 - 1) << 16) | (s->frame_height - 1),
     0x3006cccc,
     0x00000000,
     0x00000000,
@@ -212,12 +212,12 @@ int build_initial_config(uint8_t *dst, const CameraConfig cam, const SensorInfo

   // cropping
   dst += write_cont(dst, 0xe10, {
-    s->frame_height - 1,
-    s->frame_width - 1,
+    out_height - 1,
+    out_width - 1,
   });
   dst += write_cont(dst, 0xe30, {
-    s->frame_height/2 - 1,
-    s->frame_width - 1,
+    out_height / 2 - 1,
+    out_width - 1,
   });
   dst += write_cont(dst, 0xe18, {
     0x0ff00000,
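Note: the scaler and crop writes above follow one packing pattern per axis: output dimension minus one in the upper 16 bits, input (sensor) dimension minus one in the lower 16 bits, with the chroma block using half the luma output size. A small sketch of the packing for the 2688x1520 -> 1344x760 case introduced by this change (values only; register addresses and the other config words are omitted):

def pack_scale(out_dim: int, in_dim: int) -> int:
  # ((out - 1) << 16) | (in - 1), the value layout used in the 0xa3c / 0xa68 blocks above
  return ((out_dim - 1) << 16) | (in_dim - 1)

frame_width, frame_height = 2688, 1520
out_width, out_height = frame_width // 2, frame_height // 2   # out_scale = 2

luma_w = pack_scale(out_width, frame_width)          # 0x053f0a7f
luma_h = pack_scale(out_height, frame_height)        # 0x02f705ef
chroma_w = pack_scale(out_width // 2, frame_width)   # chroma output is half the luma width
print(hex(luma_w), hex(luma_h), hex(chroma_w))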
@@ -281,18 +281,21 @@ void SpectraCamera::camera_open(VisionIpcServer *v, cl_device_id device_id, cl_c

   if (!enabled) return;

+  buf.out_img_width = sensor->frame_width / sensor->out_scale;
+  buf.out_img_height = (sensor->hdr_offset > 0 ? (sensor->frame_height - sensor->hdr_offset) / 2 : sensor->frame_height) / sensor->out_scale;
+
   // size is driven by all the HW that handles frames,
   // the video encoder has certain alignment requirements in this case
-  stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, sensor->frame_width);
-  y_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, sensor->frame_height);
-  uv_height = VENUS_UV_SCANLINES(COLOR_FMT_NV12, sensor->frame_height);
+  stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, buf.out_img_width);
+  y_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, buf.out_img_height);
+  uv_height = VENUS_UV_SCANLINES(COLOR_FMT_NV12, buf.out_img_height);
   uv_offset = stride*y_height;
   yuv_size = uv_offset + stride*uv_height;
   if (cc.output_type != ISP_RAW_OUTPUT) {
     uv_offset = ALIGNED_SIZE(uv_offset, 0x1000);
     yuv_size = uv_offset + ALIGNED_SIZE(stride*uv_height, 0x1000);
   }
-  assert(stride == VENUS_UV_STRIDE(COLOR_FMT_NV12, sensor->frame_width));
+  assert(stride == VENUS_UV_STRIDE(COLOR_FMT_NV12, buf.out_img_width));
   assert(y_height/2 == uv_height);

   open = true;
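Note: the output geometry is now derived from the sensor mode plus out_scale instead of the raw readout size. A rough sketch of what camera_open() ends up computing for the downscaled OS04C10 road camera; the VENUS_* macros are Qualcomm helpers, and the alignment constants below are assumptions, used only to show the shape of the buffer sizing:

frame_width, frame_height = 2688, 1520   # full-res readout from ife_downscale_configure()
hdr_offset = -1                          # default: no DOL HDR in this mode
out_scale = 2

out_img_width = frame_width // out_scale
out_img_height = ((frame_height - hdr_offset) // 2 if hdr_offset > 0 else frame_height) // out_scale
print(out_img_width, out_img_height)     # 1344 760

# the real code uses VENUS_Y_STRIDE / VENUS_Y_SCANLINES / VENUS_UV_SCANLINES;
# assumed alignments here: 128-byte luma stride, 32-line luma and 16-line chroma scanlines
def align(x, a):
  return (x + a - 1) // a * a

stride = align(out_img_width, 128)
y_height = align(out_img_height, 32)
uv_height = align(out_img_height // 2, 16)
uv_offset = stride * y_height
yuv_size = uv_offset + stride * uv_height
print(stride, y_height, uv_height, yuv_size)   # 1408 768 384 1622016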
@@ -645,14 +648,14 @@ void SpectraCamera::config_bps(int idx, int request_id) {
   io_cfg[1].mem_handle[0] = buf_handle_yuv[idx];
   io_cfg[1].mem_handle[1] = buf_handle_yuv[idx];
   io_cfg[1].planes[0] = (struct cam_plane_cfg){
-    .width = sensor->frame_width,
-    .height = sensor->frame_height,
+    .width = buf.out_img_width,
+    .height = buf.out_img_height,
     .plane_stride = stride,
     .slice_height = y_height,
   };
   io_cfg[1].planes[1] = (struct cam_plane_cfg){
-    .width = sensor->frame_width,
-    .height = sensor->frame_height/2,
+    .width = buf.out_img_width,
+    .height = buf.out_img_height / 2,
     .plane_stride = stride,
     .slice_height = uv_height,
   };
@@ -737,7 +740,7 @@ void SpectraCamera::config_ife(int idx, int request_id, bool init) {
   bool is_raw = cc.output_type != ISP_IFE_PROCESSED;
   if (!is_raw) {
     if (init) {
-      buf_desc[0].length = build_initial_config((unsigned char*)ife_cmd.ptr + buf_desc[0].offset, cc, sensor.get(), patches);
+      buf_desc[0].length = build_initial_config((unsigned char*)ife_cmd.ptr + buf_desc[0].offset, cc, sensor.get(), patches, buf.out_img_width, buf.out_img_height);
     } else {
       buf_desc[0].length = build_update((unsigned char*)ife_cmd.ptr + buf_desc[0].offset, cc, sensor.get(), patches);
     }
@@ -844,14 +847,14 @@ void SpectraCamera::config_ife(int idx, int request_id, bool init) {
   io_cfg[0].mem_handle[0] = buf_handle_yuv[idx];
   io_cfg[0].mem_handle[1] = buf_handle_yuv[idx];
   io_cfg[0].planes[0] = (struct cam_plane_cfg){
-    .width = sensor->frame_width,
-    .height = sensor->frame_height,
+    .width = buf.out_img_width,
+    .height = buf.out_img_height,
     .plane_stride = stride,
     .slice_height = y_height,
   };
   io_cfg[0].planes[1] = (struct cam_plane_cfg){
-    .width = sensor->frame_width,
-    .height = sensor->frame_height/2,
+    .width = buf.out_img_width,
+    .height = buf.out_img_height / 2,
     .plane_stride = stride,
     .slice_height = uv_height,
   };
@@ -993,6 +996,9 @@ bool SpectraCamera::openSensor() {
   LOGD("-- Probing sensor %d", cc.camera_num);

   auto init_sensor_lambda = [this](SensorInfo *s) {
+    if (s->image_sensor == cereal::FrameData::ImageSensor::OS04C10 && cc.output_type == ISP_IFE_PROCESSED) {
+      ((OS04C10*)s)->ife_downscale_configure();
+    }
     sensor.reset(s);
     return (sensors_init() == 0);
   };
@@ -1065,8 +1071,8 @@ void SpectraCamera::configISP() {
     .data[0] = (struct cam_isp_out_port_info){
       .res_type = CAM_ISP_IFE_OUT_RES_FULL,
      .format = CAM_FORMAT_NV12,
-      .width = sensor->frame_width,
-      .height = sensor->frame_height + sensor->extra_height,
+      .width = buf.out_img_width,
+      .height = buf.out_img_height + sensor->extra_height,
       .comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0,
     },
   };
@@ -1141,8 +1147,8 @@ void SpectraCamera::configICP() {
     },
     .out_res[0] = (struct cam_icp_res_info){
       .format = 0x3, // YUV420NV12
-      .width = sensor->frame_width,
-      .height = sensor->frame_height,
+      .width = buf.out_img_width,
+      .height = buf.out_img_height,
       .fps = 20,
     },
   };
@@ -20,6 +20,17 @@ const uint32_t os04c10_analog_gains_reg[] = {

 } // namespace

+void OS04C10::ife_downscale_configure() {
+  out_scale = 2;
+
+  pixel_size_mm = 0.002;
+  frame_width = 2688;
+  frame_height = 1520;
+  exposure_time_max = 2352;
+
+  init_reg_array.insert(init_reg_array.end(), std::begin(ife_downscale_override_array_os04c10), std::end(ife_downscale_override_array_os04c10));
+}
+
 OS04C10::OS04C10() {
   image_sensor = cereal::FrameData::ImageSensor::OS04C10;
   bayer_pattern = CAM_ISP_PATTERN_BAYER_BGBGBG;
@@ -88,8 +88,6 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   {0x37c7, 0xa8},
   {0x37da, 0x11},
   {0x381f, 0x08},
-  // {0x3829, 0x03},
-  // {0x3832, 0x00},
   {0x3881, 0x00},
   {0x3888, 0x04},
   {0x388b, 0x00},
@@ -199,7 +197,6 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   {0x370b, 0x48},
   {0x370c, 0x01},
   {0x370f, 0x00},
-  {0x3714, 0x28},
   {0x3716, 0x04},
   {0x3719, 0x11},
   {0x371a, 0x1e},
@@ -231,7 +228,6 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   {0x37bd, 0x01},
   {0x37bf, 0x26},
   {0x37c0, 0x11},
-  {0x37c2, 0x14},
   {0x37cd, 0x19},
   {0x37e0, 0x08},
   {0x37e6, 0x04},
@@ -241,14 +237,9 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   {0x37d8, 0x02},
   {0x37e2, 0x10},
   {0x3739, 0x10},
-  {0x3662, 0x08},
   {0x37e4, 0x20},
   {0x37e3, 0x08},
-  {0x37d9, 0x04},
   {0x4040, 0x00},
-  {0x4041, 0x03},
-  {0x4008, 0x01},
-  {0x4009, 0x06},

   // FSIN
   {0x3002, 0x22},
@@ -269,20 +260,11 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   {0x3802, 0x00}, {0x3803, 0x00},
   {0x3804, 0x0a}, {0x3805, 0x8f},
   {0x3806, 0x05}, {0x3807, 0xff},
-  {0x3808, 0x05}, {0x3809, 0x40},
-  {0x380a, 0x02}, {0x380b, 0xf8},
   {0x3811, 0x08},
   {0x3813, 0x08},
-  {0x3814, 0x03},
   {0x3815, 0x01},
-  {0x3816, 0x03},
   {0x3817, 0x01},

-  {0x380c, 0x0b}, {0x380d, 0xac}, // HTS
-  {0x380e, 0x06}, {0x380f, 0x9c}, // VTS
-
-  {0x3820, 0xb3},
-  {0x3821, 0x01},
   {0x3880, 0x00},
   {0x3882, 0x20},
   {0x3c91, 0x0b},
@@ -331,4 +313,40 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
   // r
   {0x5104, 0x08}, {0x5105, 0xd6},
   {0x5144, 0x08}, {0x5145, 0xd6},
+
+  {0x3714, 0x28},
+  {0x37c2, 0x14},
+  {0x3662, 0x08},
+  {0x37d9, 0x04},
+  {0x4041, 0x03},
+  {0x4008, 0x01},
+  {0x4009, 0x06},
+  {0x3808, 0x05}, {0x3809, 0x40},
+  {0x380a, 0x02}, {0x380b, 0xf8},
+  {0x3814, 0x03},
+  {0x3816, 0x03},
+  {0x380c, 0x0b}, {0x380d, 0xac}, // HTS
+  {0x380e, 0x06}, {0x380f, 0x9c}, // VTS
+  {0x3820, 0xb3},
+  {0x3821, 0x01},
+};
+
+const struct i2c_random_wr_payload ife_downscale_override_array_os04c10[] = {
+  // OS04C10_AA_00_02_17_wAO_2688x1524_MIPI728Mbps_Linear12bit_20FPS_4Lane_MCLK24MHz
+  {0x3829, 0x03},
+  {0x3714, 0x24},
+  {0x37c2, 0x04},
+  {0x3662, 0x10},
+  {0x37d9, 0x08},
+  {0x4041, 0x07},
+  {0x4008, 0x02},
+  {0x4009, 0x0d},
+  {0x3808, 0x0a}, {0x3809, 0x80},
+  {0x380a, 0x05}, {0x380b, 0xf0},
+  {0x3814, 0x01},
+  {0x3816, 0x01},
+  {0x380c, 0x08}, {0x380d, 0x5c}, // HTS
+  {0x380e, 0x09}, {0x380f, 0x38}, // VTS
+  {0x3820, 0xb0},
+  {0x3821, 0x00},
 };
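Note: the override table moves the sensor from the binned 1344x760 readout to the full 2688x1520 array, and the IFE then downscales by 2. The output-size registers are high/low byte pairs, so the mode change can be checked by hand; a small decoding sketch (the register interpretation is informal, based on the HTS/VTS comments above and the usual OmniVision layout):

def dec(hi: int, lo: int) -> int:
  return (hi << 8) | lo

# previous init table (binned readout)
print(dec(0x05, 0x40), dec(0x02, 0xf8))   # 1344 760   output width/height (0x3808-0x380b)
print(dec(0x0b, 0xac), dec(0x06, 0x9c))   # 2988 1692  HTS / VTS

# ife_downscale_override_array_os04c10 (full-resolution readout)
print(dec(0x0a, 0x80), dec(0x05, 0xf0))   # 2688 1520  output width/height
print(dec(0x08, 0x5c), dec(0x09, 0x38))   # 2140 2360  HTS / VTS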
@@ -29,6 +29,7 @@ public:
   uint32_t frame_stride;
   uint32_t frame_offset = 0;
   uint32_t extra_height = 0;
+  int out_scale = 1;
   int registers_offset = -1;
   int stats_offset = -1;
   int hdr_offset = -1;
@@ -109,6 +110,7 @@ public:
 class OS04C10 : public SensorInfo {
 public:
   OS04C10();
+  void ife_downscale_configure();
   std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override;
   float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
   int getSlaveAddress(int port) const override;