// OpenCL event-completion callback: validates the event against the
// recorded user data, then frees the per-dispatch KernelUserData.
void CLKernel::event_notify (cl_event event, cl_int status, void* data)
{
    KernelUserData *user_data = (KernelUserData *) data;

    XCAM_UNUSED (status);
    // The completed event must be the one stored when the kernel was queued.
    XCAM_ASSERT (event == user_data->event->get_event_id ());
    XCAM_UNUSED (event);

    delete user_data;
}
// clBuildProgram notification callback. Currently a stub: the kernel-name
// query below is kept commented out for future diagnostics.
void CLContext::program_pfn_notify (cl_program program, void *user_data)
{
    CLContext *ctx = (CLContext *) user_data;
    char kernel_names[XCAM_CL_MAX_STR_SIZE];

    XCAM_UNUSED (ctx);
    XCAM_UNUSED (program);
    xcam_mem_clear (kernel_names);
    //clGetProgramInfo (program, CL_PROGRAM_KERNEL_NAMES, sizeof (kernel_names) - 1, kernel_names, NULL);
    //XCAM_LOG_DEBUG ("cl program report error on kernels: %s", kernel_names);
}
void CLContext::context_pfn_notify ( const char* erro_info, const void *private_info, size_t cb, void *user_data ) { CLContext *context = (CLContext*) user_data; XCAM_UNUSED (context); XCAM_UNUSED (erro_info); XCAM_UNUSED (private_info); XCAM_UNUSED (cb); XCAM_LOG_DEBUG ("cl context pfn error:%s", XCAM_STR (erro_info)); }
// Cache the freshly calculated 3A results for the tuner.
// @param analyzer  source analyzer (unused)
// @param results   list of 3A results to copy into _results
void X3aAnalyzeTuner::x3a_calculation_done (XAnalyzer *analyzer, X3aResultList &results)
{
    XCAM_UNUSED (analyzer);
    // std::list::assign already discards previous contents, so the former
    // explicit clear() before it was redundant and has been removed.
    _results.assign (results.begin (), results.end ());
}
// Run the AIQ GBCE (Global Brightness/Contrast Enhancement) algorithm and
// cache its result in _gbce_result.
// @param output  3A result list (unused here; GBCE output is kept internally)
// @return XCAM_RETURN_NO_ERROR on success (or when GBCE is disabled),
//         XCAM_RETURN_ERROR_AIQ if ia_aiq_gbce_run fails.
XCamReturn AiqCommonHandler::analyze (X3aResultList &output)
{
    ia_aiq *ia_handle = NULL;
    ia_aiq_gbce_results *gbce_result = NULL;
    ia_err ia_error = ia_err_none;

    XCAM_UNUSED (output);

    // Serialize with other handlers sharing this analyzer's state.
    AnalyzerHandler::HanlderLock lock(this);

    if (has_gbce_unlock()) {
        ia_aiq_gbce_input_params gbce_input;
        xcam_mem_clear (&gbce_input);
        gbce_input.gbce_level = ia_aiq_gbce_level_use_tuning;
        gbce_input.frame_use = _aiq_compositor->get_frame_use ();
        gbce_input.ev_shift = 0; // Todo
        ia_handle = _aiq_compositor->get_handle ();
        XCAM_ASSERT (ia_handle);
        ia_error = ia_aiq_gbce_run (ia_handle, &gbce_input, &gbce_result);
        XCAM_FAIL_RETURN (ERROR, ia_error == ia_err_none, XCAM_RETURN_ERROR_AIQ, "AIQ run GBCE failed");

        //TODO, need copy GBCE result out, not just assign
        // NOTE(review): gbce_result points at memory owned by the AIQ
        // library; presumably it is invalidated by the next AIQ run —
        // confirm before relying on _gbce_result across frames.
        _gbce_result = gbce_result;
    }
    else {
        _gbce_result = NULL;
    }
    return XCAM_RETURN_NO_ERROR;
}
// Auto-focus analysis placeholder: AF is not implemented yet, so report
// success to keep the 3A pipeline running.
XCamReturn AiqAfHandler::analyze (X3aResultList &output)
{
    // TODO
    XCAM_UNUSED (output);
    return XCAM_RETURN_NO_ERROR;
}
// Run the AIQ auto-white-balance algorithm, store the result, and smooth
// it against the previous frame's result via adjust_speed().
// @param output  3A result list (unused; AWB result is kept in _result)
// @return XCAM_RETURN_NO_ERROR on success, XCAM_RETURN_ERROR_PARAM if the
//         ia parameters cannot be prepared, XCAM_RETURN_ERROR_AIQ on AIQ
//         run failure.
XCamReturn AiqAwbHandler::analyze (X3aResultList &output)
{
    ia_aiq *ia_handle = NULL;
    ia_aiq_awb_results *awb_ret = NULL;
    ia_err ia_error = ia_err_none;

    XCAM_UNUSED (output);

    AnalyzerHandler::HanlderLock lock(this);

    if (!ensure_ia_parameters ()) {
        // Fixed copy-paste error: this message previously said "AIQ AE"
        // although this is the AWB handler.
        XCAM_LOG_ERROR ("AIQ AWB ensure ia parameters failed");
        return XCAM_RETURN_ERROR_PARAM;
    }

    ia_handle = _aiq_compositor->get_handle ();
    XCAM_ASSERT (ia_handle);
    ia_error = ia_aiq_awb_run (ia_handle, &_input, &awb_ret);
    XCAM_FAIL_RETURN (ERROR, ia_error == ia_err_none, XCAM_RETURN_ERROR_AIQ, "AIQ run AWB failed");

    _result = *awb_ret;
    // Seed the history with the first result so adjust_speed has a baseline.
    if (!_started) {
        _history_result = _result;
        _started = true;
    }
    adjust_speed (_history_result);
    _history_result = _result;

    return XCAM_RETURN_NO_ERROR;
}
// Default failure hook: log the timestamp and reason of a failed 3A
// calculation.
void AnalyzerCallback::x3a_calculation_failed (X3aAnalyzer *analyzer, int64_t timestamp, const char *msg)
{
    XCAM_UNUSED (analyzer);

    XCAM_LOG_WARNING (
        "Calculate 3a result failed, ts(" XCAM_TIMESTAMP_FORMAT "), msg:%s",
        XCAM_TIMESTAMP_ARGS (timestamp), XCAM_STR (msg));
}
// Create an analysis context for every registered smart handler.
// A handler whose context creation fails is only logged; initialization
// continues so the remaining handlers still work.
// @param width,height,framerate  stream parameters (currently unused)
// @return always XCAM_RETURN_NO_ERROR
XCamReturn SmartAnalyzer::internal_init (uint32_t width, uint32_t height, double framerate)
{
    XCAM_UNUSED (width);
    XCAM_UNUSED (height);
    XCAM_UNUSED (framerate);

    for (SmartHandlerList::iterator i_handler = _handlers.begin ();
            i_handler != _handlers.end (); ++i_handler) {
        // Bind by reference: copying the SmartPtr each iteration only
        // churned the refcount.
        SmartPtr<SmartAnalysisHandler> &handler = *i_handler;
        XCamReturn ret = handler->create_context (handler);
        if (ret != XCAM_RETURN_NO_ERROR) {
            // Fixed typo in log message ("initilize" -> "initialize").
            XCAM_LOG_WARNING ("smart analyzer initialize handler(%s) context failed", XCAM_STR (handler->get_name ()));
        }
    }
    return XCAM_RETURN_NO_ERROR;
}
// After kernel execution: drop the per-frame CL image references so the
// underlying buffers can be recycled.
XCamReturn CLImageKernel::post_execute (SmartPtr<DrmBoBuffer> &output)
{
    XCAM_UNUSED (output);

    _image_out.release ();
    _image_in.release ();

    return XCAM_RETURN_NO_ERROR;
}
// Completion hook: wait for all queued CL commands to finish, then hand the
// scaled buffer downstream.
XCamReturn CLImageScaler::execute_done (SmartPtr<VideoBuffer> &output)
{
    XCAM_UNUSED (output);

    get_context ()->finish ();
    XCAM_ASSERT (_scaler_buf.ptr ());

    // post buffer out
    return post_buffer (_scaler_buf);
}
// Forward a 3A calculation failure to the registered callback (if any) and
// record it in the debug log.
void X3aAnalyzer::notify_calculation_failed (AnalyzerHandler *handler, int64_t timestamp, const char *msg)
{
    XCAM_UNUSED (handler);

    if (_callback)
        _callback->x3a_calculation_failed (this, timestamp, msg);

    XCAM_LOG_DEBUG (
        "calculation failed on ts:" XCAM_TIMESTAMP_FORMAT ", reason:%s",
        XCAM_TIMESTAMP_ARGS (timestamp), XCAM_STR (msg));
}
// Record the requested temporal-noise-reduction mode and enable/disable the
// YUV TNR stage accordingly. The denoise level is not used yet.
bool CL3aImageProcessor::set_tnr (uint32_t mode, uint8_t level)
{
    XCAM_UNUSED (level);
    _tnr_mode = mode;

    STREAM_LOCK;

    if (_yuv_pipe.ptr ()) {
        const bool yuv_enabled = (_tnr_mode & CL_TNR_TYPE_YUV);
        _yuv_pipe->set_tnr_enable (yuv_enabled);
    }

    return true;
}
// Default completion hook: log every non-null 3A result in the list with
// its type and timestamp.
// @param analyzer  source analyzer (unused)
// @param results   calculated 3A results to report
void AnalyzerCallback::x3a_calculation_done (X3aAnalyzer *analyzer, X3aResultList &results)
{
    XCAM_UNUSED (analyzer);

    for (X3aResultList::iterator i_res = results.begin (); i_res != results.end (); ++i_res) {
        // Bind by const reference: copying the SmartPtr each iteration
        // only incremented/decremented the refcount for no benefit.
        const SmartPtr<X3aResult> &res = *i_res;
        if (res.ptr () == NULL)
            continue;
        XCAM_LOG_DEBUG (
            "calculated 3a result(type:%d, timestamp:" XCAM_TIMESTAMP_FORMAT ")",
            res->get_type (), XCAM_TIMESTAMP_ARGS (res->get_timestamp ()));
    }
}
// Record the requested temporal-noise-reduction mode and enable/disable the
// RGB and YUV TNR stages accordingly.
// @param mode   bitmask of CL_TNR_TYPE_RGB / CL_TNR_TYPE_YUV
// @param level  denoise level (currently unused)
// @return true only if every present TNR stage accepted the change;
//         false if no stage is present or any stage failed.
bool CL3aImageProcessor::set_tnr (uint32_t mode, uint8_t level)
{
    _tnr_mode = mode;

    STREAM_LOCK;
    //TODO: map denoise level to threshold & gain
    XCAM_UNUSED (level);

    bool applied = false;
    bool success = true;
    if (_tnr_rgb.ptr ()) {
        success = _tnr_rgb->set_kernels_enable (mode & CL_TNR_TYPE_RGB) && success;
        applied = true;
    }
    if (_tnr_yuv.ptr ()) {
        success = _tnr_yuv->set_kernels_enable (mode & CL_TNR_TYPE_YUV) && success;
        applied = true;
    }
    // Previously the RGB stage's return value was silently overwritten when
    // the YUV stage was also present; now a failure in either stage is
    // reported.
    return applied && success;
}
// Translate internal 3A results to the XCam C representation, run the
// conversion step, free the translated heads, and notify listeners.
// @param analyzer  source analyzer (unused)
// @param results   3A results; also receives the converted output
void HybridAnalyzer::x3a_calculation_done (XAnalyzer *analyzer, X3aResultList &results)
{
    XCAM_UNUSED (analyzer);

    // Use a stack array instead of the previous function-local `static`
    // buffer: the static made this callback non-reentrant and racy if two
    // analyzers completed concurrently.
    XCam3aResultHead *res_heads[XCAM_3A_MAX_RESULT_COUNT];
    xcam_mem_clear (res_heads);
    XCAM_ASSERT (results.size () < XCAM_3A_MAX_RESULT_COUNT);

    uint32_t result_count = translate_3a_results_to_xcam (results, res_heads, XCAM_3A_MAX_RESULT_COUNT);
    convert_results (res_heads, result_count, results);
    for (uint32_t i = 0; i < result_count; ++i) {
        if (res_heads[i])
            free_3a_result (res_heads[i]);
    }

    notify_calculation_done (results);
}
// C-callback adaptor: return the DRM buffer object backing an
// XCamVideoBufferIntel, or NULL when the wrapped buffer is not a
// DrmBoBuffer (or when libdrm support is compiled out).
void * SmartBufferPriv::buf_get_bo (XCamVideoBufferIntel *data)
{
#if HAVE_LIBDRM
    // `data` is actually the SmartBufferPriv embedding the vtable struct.
    SmartBufferPriv *buf = (SmartBufferPriv*) data;
    XCAM_ASSERT (buf->_buf_ptr.ptr ());
    // Downcast the generic buffer; fails (returns NULL) for non-DRM buffers.
    SmartPtr<DrmBoBuffer> bo_buf = buf->_buf_ptr.dynamic_cast_ptr<DrmBoBuffer> ();
    XCAM_FAIL_RETURN (
        ERROR, bo_buf.ptr (), NULL,
        "get DrmBoBuffer failed");
    return bo_buf->get_bo ();
#else
    XCAM_LOG_ERROR ("VideoBuffer doesn't support DrmBoBuffer");
    XCAM_UNUSED (data);
    return NULL;
#endif
}
void HybridAnalyzer::x3a_calculation_failed (XAnalyzer *analyzer, int64_t timestamp, const char *msg) { XCAM_UNUSED (analyzer); notify_calculation_failed (NULL, timestamp, msg); }
// Stub: accept a single 3A result without applying it.
XCamReturn CL3aImageProcessor::apply_3a_result (SmartPtr<X3aResult> &result)
{
    XCAM_UNUSED (result);

    return XCAM_RETURN_NO_ERROR;
}
// Stub: accept a list of 3A results without applying them.
XCamReturn CL3aImageProcessor::apply_3a_results (X3aResultList &results)
{
    XCAM_UNUSED (results);

    return XCAM_RETURN_NO_ERROR;
}
// Stub: this processor variant never claims any 3A result.
bool CL3aImageProcessor::can_process_result (SmartPtr<X3aResult> &result)
{
    XCAM_UNUSED (result);

    return false;
}
// Hook invoked after VIDIOC_S_FMT; the base implementation needs no
// post-processing, subclasses may override.
XCamReturn V4l2Device::post_set_format (struct v4l2_format &format)
{
    XCAM_UNUSED (format);

    return XCAM_RETURN_NO_ERROR;
}
// Dispatch a single 3A algorithm result to the CL pipeline stage that
// consumes it (bayer basic, bayer, CSC, YUV, wavelet, wire-frame, ...).
// Stages that are not instantiated are silently skipped; unknown result
// types only produce a warning.
// @param result  the 3A result to apply; NULL pointers are bypassed
// @return XCAM_RETURN_BYPASS for a null result, otherwise
//         XCAM_RETURN_NO_ERROR.
XCamReturn CL3aImageProcessor::apply_3a_result (SmartPtr<X3aResult> &result)
{
    STREAM_LOCK;

    if (result.ptr() == NULL)
        return XCAM_RETURN_BYPASS;

    uint32_t res_type = result->get_type ();

    switch (res_type) {
    case XCAM_3A_RESULT_WHITE_BALANCE: {
        SmartPtr<X3aWhiteBalanceResult> wb_res = result.dynamic_cast_ptr<X3aWhiteBalanceResult> ();
        XCAM_ASSERT (wb_res.ptr ());
        if (_bayer_basic_pipe.ptr ()) {
            _bayer_basic_pipe->set_wb_config (wb_res->get_standard_result ());
            _bayer_basic_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_BLACK_LEVEL: {
        SmartPtr<X3aBlackLevelResult> bl_res = result.dynamic_cast_ptr<X3aBlackLevelResult> ();
        XCAM_ASSERT (bl_res.ptr ());
        if (_bayer_basic_pipe.ptr ()) {
            _bayer_basic_pipe->set_blc_config (bl_res->get_standard_result ());
            _bayer_basic_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_DEFECT_PIXEL_CORRECTION: {
        // Defect-pixel correction is validated but not yet wired to a stage.
        SmartPtr<X3aDefectPixelResult> def_res = result.dynamic_cast_ptr<X3aDefectPixelResult> ();
        XCAM_ASSERT (def_res.ptr ());
        XCAM_UNUSED (def_res);
        break;
    }
    case XCAM_3A_RESULT_RGB2YUV_MATRIX: {
        // The color matrix feeds both the standalone CSC and the YUV pipe.
        SmartPtr<X3aColorMatrixResult> csc_res = result.dynamic_cast_ptr<X3aColorMatrixResult> ();
        XCAM_ASSERT (csc_res.ptr ());
        if (_csc.ptr()) {
            _csc->set_rgbtoyuv_matrix (csc_res->get_standard_result ());
            _csc->set_3a_result (result);
        }
        if (_yuv_pipe.ptr()) {
            _yuv_pipe->set_rgbtoyuv_matrix (csc_res->get_standard_result ());
            _yuv_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_MACC: {
        SmartPtr<X3aMaccMatrixResult> macc_res = result.dynamic_cast_ptr<X3aMaccMatrixResult> ();
        XCAM_ASSERT (macc_res.ptr ());
        if (_yuv_pipe.ptr()) {
            _yuv_pipe->set_macc_table (macc_res->get_standard_result ());
            _yuv_pipe->set_3a_result (result);
        }
        break;
    }
    // R/B gamma tables are intentionally ignored; only G/Y gamma is applied.
    case XCAM_3A_RESULT_R_GAMMA:
    case XCAM_3A_RESULT_B_GAMMA:
        break;
    case XCAM_3A_RESULT_G_GAMMA:
    case XCAM_3A_RESULT_Y_GAMMA: {
        SmartPtr<X3aGammaTableResult> gamma_res = result.dynamic_cast_ptr<X3aGammaTableResult> ();
        XCAM_ASSERT (gamma_res.ptr ());
        if (_bayer_basic_pipe.ptr ()) {
            _bayer_basic_pipe->set_gamma_table (gamma_res->get_standard_result ());
            _bayer_basic_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_TEMPORAL_NOISE_REDUCTION_RGB: {
        // RGB TNR is validated but not yet wired to a stage.
        SmartPtr<X3aTemporalNoiseReduction> tnr_res = result.dynamic_cast_ptr<X3aTemporalNoiseReduction> ();
        XCAM_ASSERT (tnr_res.ptr ());
        XCAM_UNUSED (tnr_res);
        break;
    }
    case XCAM_3A_RESULT_TEMPORAL_NOISE_REDUCTION_YUV: {
        SmartPtr<X3aTemporalNoiseReduction> tnr_res = result.dynamic_cast_ptr<X3aTemporalNoiseReduction> ();
        XCAM_ASSERT (tnr_res.ptr ());
        if (_yuv_pipe.ptr ()) {
            _yuv_pipe->set_tnr_yuv_config(tnr_res->get_standard_result ());
            _yuv_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_EDGE_ENHANCEMENT: {
        SmartPtr<X3aEdgeEnhancementResult> ee_ee_res = result.dynamic_cast_ptr<X3aEdgeEnhancementResult> ();
        XCAM_ASSERT (ee_ee_res.ptr ());
        if (_bayer_pipe.ptr()) {
            _bayer_pipe->set_ee_config (ee_ee_res->get_standard_result ());
            _bayer_pipe->set_3a_result (result);
        }
#if ENABLE_YEENR_HANDLER
        if (_ee.ptr()) {
            _ee->set_ee_config_ee (ee_ee_res->get_standard_result ());
            _ee->set_3a_result (result);
        }
#endif
        break;
    }
    case XCAM_3A_RESULT_BAYER_NOISE_REDUCTION: {
        SmartPtr<X3aBayerNoiseReduction> bnr_res = result.dynamic_cast_ptr<X3aBayerNoiseReduction> ();
        XCAM_ASSERT (bnr_res.ptr ());
        if (_bayer_pipe.ptr()) {
            _bayer_pipe->set_bnr_config (bnr_res->get_standard_result ());
            _bayer_pipe->set_3a_result (result);
        }
        break;
    }
    case XCAM_3A_RESULT_BRIGHTNESS: {
        // Brightness level is extracted but not applied anywhere yet.
        SmartPtr<X3aBrightnessResult> brightness_res = result.dynamic_cast_ptr<X3aBrightnessResult> ();
        XCAM_ASSERT (brightness_res.ptr ());
        float brightness_level = ((XCam3aResultBrightness)brightness_res->get_standard_result()).brightness_level;
        XCAM_UNUSED (brightness_level);
        break;
    }
    case XCAM_3A_RESULT_WAVELET_NOISE_REDUCTION: {
        // Both wavelet implementations (legacy and new) take the same config.
        SmartPtr<X3aWaveletNoiseReduction> wavelet_res = result.dynamic_cast_ptr<X3aWaveletNoiseReduction> ();
        XCAM_ASSERT (wavelet_res.ptr ());
        if (_wavelet.ptr()) {
            _wavelet->set_denoise_config (wavelet_res->get_standard_result ());
        }
        if (_newwavelet.ptr()) {
            _newwavelet->set_denoise_config (wavelet_res->get_standard_result ());
        }
        break;
    }
    case XCAM_3A_RESULT_FACE_DETECTION: {
        SmartPtr<X3aFaceDetectionResult> fd_res = result.dynamic_cast_ptr<X3aFaceDetectionResult> ();
        XCAM_ASSERT (fd_res.ptr ());
        if (_wire_frame.ptr ()) {
            // Face rectangles are scaled to the output resolution before drawing.
            _wire_frame->set_wire_frame_config (fd_res->get_standard_result_ptr (), get_scaler_factor ());
        }
        break;
    }
    default:
        XCAM_LOG_WARNING ("CL3aImageProcessor unknow 3a result:%d", res_type);
        break;
    }

    return XCAM_RETURN_NO_ERROR;
}