// From android::CameraListener
void CameraControl::notify(int32_t msg_type, int32_t ext1, int32_t ext2)
{
    REPORT_FUNCTION();
    printf("\text1: %d, ext2: %d \n", ext1, ext2);

    if (!listener)
        return;

    switch (msg_type) {
    case CAMERA_MSG_ERROR:
        if (listener->on_msg_error_cb)
            listener->on_msg_error_cb(listener->context);
        break;
    case CAMERA_MSG_SHUTTER:
        if (listener->on_msg_shutter_cb)
            listener->on_msg_shutter_cb(listener->context);
        break;
    case CAMERA_MSG_ZOOM:
        if (listener->on_msg_zoom_cb)
            listener->on_msg_zoom_cb(listener->context, ext1);
        break;
    case CAMERA_MSG_FOCUS:
        if (listener->on_msg_focus_cb)
            listener->on_msg_focus_cb(listener->context);
        break;
    default:
        break;
    }
}
void android_camera_take_snapshot(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera->takePicture(CAMERA_MSG_SHUTTER | CAMERA_MSG_COMPRESSED_IMAGE);
}
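A minimal caller-side sketch of how this fits with the notify() callbacks above. It assumes a CameraControl* has already been obtained from the library's connect path, and that the listener type (called CameraControlListener here for illustration) carries the fields used in notify(); only the functions shown elsewhere in this listing are taken as given.

// Hypothetical usage sketch: start the preview, register a shutter callback,
// and request a JPEG snapshot. The listener struct name and connect path are
// assumptions, not part of this listing.
static void my_shutter_cb(void* context)
{
    printf("shutter fired\n");
}

void take_one_snapshot(CameraControl* control, CameraControlListener* listener)
{
    listener->on_msg_shutter_cb = my_shutter_cb;  // field name as used in notify()
    android_camera_start_preview(control);        // preview must be running first
    android_camera_take_snapshot(control);        // fires CAMERA_MSG_SHUTTER | CAMERA_MSG_COMPRESSED_IMAGE
}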
void android_camera_update_preview_texture(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    control->preview_texture->updateTexImage();
}
void CDVDMediaCodecInfo::UpdateTexImage()
{
    CSingleLock lock(m_section);
    REPORT_FUNCTION();

    if (!m_valid)
        return;

    // updateTexImage will check and spew any prior gl errors,
    // clear them before we call updateTexImage.
    glGetError();

    // this is key: after calling releaseOutputBuffer, we must
    // wait a little for MediaCodec to render to the surface.
    // Then we can updateTexImage without delay. If we do not
    // wait, then video playback gets jerky. To optimize this,
    // we hook the SurfaceTexture OnFrameAvailable callback
    // using CJNISurfaceTextureOnFrameAvailableListener and wait
    // on a CEvent to fire. 50ms seems to be a good max fallback.
    m_frameready->WaitMSec(50);

    //m_surfacetexture->updateTexImage();
    gl_consumer_update_texture(m_surfacetexture);
    //m_timestamp = m_surfacetexture->getTimestamp();
}
static void on_client_died_cb(void *context)
{
    REPORT_FUNCTION();
    CLog::Log(LOGERROR, "%s::%s - %s", CLASSNAME, __func__,
              "decoding service client died");
}
void android_camera_set_focus_region(
    CameraControl* control,
    FocusRegion* region)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    static const char* focus_region_pattern = "(%d,%d,%d,%d,%d)";
    static char focus_region[256];
    snprintf(focus_region, sizeof(focus_region), focus_region_pattern,
             region->left,
             region->top,
             region->right,
             region->bottom,
             region->weight);

    control->camera_parameters.set(
        android::CameraParameters::KEY_FOCUS_AREAS,
        focus_region);
    control->camera->setParameters(control->camera_parameters.flatten());
}
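For reference, Android's KEY_FOCUS_AREAS coordinates span -1000..1000 with (0,0) at the preview centre and weights from 1 to 1000, so a caller-side sketch (the FocusRegion field names follow the usage above; the surrounding setup is assumed) might look like:

// Hypothetical caller sketch: ask the driver to focus on a box around the
// centre of the preview, using Android's -1000..1000 focus-area convention.
FocusRegion region;
region.left   = -200;
region.top    = -200;
region.right  =  200;
region.bottom =  200;
region.weight =  1;    // relative weight, 1..1000

android_camera_set_focus_region(control, &region);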
void set_frame_available_cb(FrameAvailableCb cb, void *context)
{
    REPORT_FUNCTION();

    frame_available_cb = cb;
    frame_available_context = context;

    gl_consumer_set_frame_available_cb(m_surfaceTexture,
        &CDVDMediaCodecOnFrameAvailable::on_frame_available_cb,
        static_cast<void*>(this));
}
void CameraControl::postData(
    int32_t msg_type,
    const android::sp<android::IMemory>& data,
    camera_frame_metadata_t* metadata)
{
    REPORT_FUNCTION();

    if (!listener)
        return;

    switch (msg_type) {
    case CAMERA_MSG_RAW_IMAGE:
        if (listener->on_data_raw_image_cb)
            listener->on_data_raw_image_cb(data->pointer(), data->size(), listener->context);
        break;
    case CAMERA_MSG_COMPRESSED_IMAGE:
        if (listener->on_data_compressed_image_cb)
            listener->on_data_compressed_image_cb(data->pointer(), data->size(), listener->context);
        break;
    default:
        break;
    }

    camera->releaseRecordingFrame(data);
}
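A sketch of what a client-side compressed-image callback could look like. The (data, size, context) argument order follows the postData() call above; the exact callback typedef and the size type are assumptions, and the output path is purely illustrative.

#include <stdio.h>

// Hypothetical listener callback: write the JPEG handed over by postData()
// to a file. 'data' points at the compressed image in shared memory and is
// only valid for the duration of the call.
static void my_compressed_image_cb(void* data, size_t size, void* context)
{
    FILE* f = fopen("/tmp/snapshot.jpg", "wb");
    if (f) {
        fwrite(data, 1, size, f);
        fclose(f);
    }
}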
void android_camera_dump_parameters(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    printf("%s \n", control->camera->getParameters().string());
}

void CameraRecordService::instantiate()
{
    REPORT_FUNCTION();
    defaultServiceManager()->addService(
        String16(ICameraRecordService::exported_service_name()),
        service_instance());
    ALOGV("Added Binder service '%s' to ServiceManager",
          ICameraRecordService::exported_service_name());
}

CDVDMediaCodecOnFrameAvailable(GLConsumerWrapperHybris surfaceTexture)
    : m_surfaceTexture(surfaceTexture)
    , frame_available_cb(false)
    , frame_available_context(NULL)
{
    //m_surfaceTexture->setOnFrameAvailableListener(*this);
    REPORT_FUNCTION();
    set_frame_available_cb(true, m_surfaceTexture);
}
void android_camera_stop_autofocus(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera->cancelAutoFocus();
}

void android_camera_start_preview(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera->startPreview();
}

void android_camera_get_preview_fps(CameraControl* control, int* fps)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    *fps = control->camera_parameters.getPreviewFrameRate();
}

void android_camera_set_display_orientation(CameraControl* control, int32_t clockwise_rotation_degree)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    static const int32_t ignored_parameter = 0;
    control->camera->sendCommand(CAMERA_CMD_SET_DISPLAY_ORIENTATION,
                                 clockwise_rotation_degree,
                                 ignored_parameter);
}

void android_camera_set_preview_fps(CameraControl* control, int fps)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.setPreviewFrameRate(fps);
    control->camera->setParameters(control->camera_parameters.flatten());
}

void android_camera_disconnect(CameraControl* control)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera->disconnect();
    control->camera->unlock();
}

void android_camera_set_preview_surface(CameraControl* control, SfSurface* surface)
{
    REPORT_FUNCTION();
    assert(control);
    assert(surface);

    android::Mutex::Autolock al(control->guard);
    control->camera->setPreviewDisplay(surface->surface);
}

void android_camera_get_max_zoom(CameraControl* control, int* zoom)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    *zoom = control->camera_parameters.getInt(android::CameraParameters::KEY_MAX_ZOOM);
}

void android_camera_get_video_size(CameraControl* control, int* width, int* height)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.getVideoSize(width, height);
}

void android_camera_get_preview_fps_range(CameraControl* control, int* min, int* max)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.getPreviewFpsRange(min, max);
}
void CDVDVideoCodecHybris::InitSurfaceTexture(void)
{
    REPORT_FUNCTION();

    // We MUST create the GLES texture on the main thread
    // to match where the valid GLES context is located.
    // It would be nice to move this out of here, we would need
    // to create/fetch/create from g_RenderManager. But g_RenderManager
    // does not know we are using MediaCodec until Configure and
    // we need m_surfaceTexture valid before then. Chicken, meet Egg.
    if (g_application.IsCurrentThread())
    {
        // localize GLuint so we do not spew gles includes in our header
        GLuint texture_id;
        glGenTextures(1, &texture_id);
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
        glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
        m_textureId = texture_id;
        CLog::Log(LOGNOTICE, "%s::%s::%d - m_textureId = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)m_textureId);

        //m_surfaceTexture = std::shared_ptr<CJNISurfaceTexture>(new CJNISurfaceTexture(m_textureId));
        decoding_service_init();
        decoding_service_set_client_death_cb(&on_client_died_cb, 123, static_cast<void*>(this));
        DSSessionWrapperHybris decoding_session = decoding_service_create_session(123);
        CLog::Log(LOGNOTICE, "%s::%s::%d - decoding_session = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)decoding_session);

        IGBCWrapperHybris igbc_wrapper = decoding_service_get_igraphicbufferconsumer();
        CLog::Log(LOGNOTICE, "%s::%s::%d - igbc_wrapper = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)igbc_wrapper);

        m_surfaceTexture = gl_consumer_create_by_id_with_igbc(m_textureId, igbc_wrapper);
        CLog::Log(LOGNOTICE, "%s::%s::%d - m_surfaceTexture = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)m_surfaceTexture);

        // hook the surfaceTexture OnFrameAvailable callback
        m_frameAvailable = std::shared_ptr<CDVDMediaCodecOnFrameAvailable>(
            new CDVDMediaCodecOnFrameAvailable(m_surfaceTexture));
        CLog::Log(LOGNOTICE, "%s::%s::%d - m_frameAvailable = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)&(*m_frameAvailable));

        IGBPWrapperHybris igbp = decoding_service_get_igraphicbufferproducer();
        CLog::Log(LOGNOTICE, "%s::%s::%d - igbp = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)igbp);

        m_surface = surface_texture_client_create_by_igbp(igbp);
        CLog::Log(LOGNOTICE, "%s::%s::%d - m_surface = %x",
                  CLASSNAME, __func__, __LINE__, (unsigned int)m_surface);

        surface_texture_client_set_hardware_rendering(m_surface, true);
    }
    else
    {
        ThreadMessageCallback callbackData;
        callbackData.callback = &CallbackInitSurfaceTexture;
        callbackData.userptr = (void*)this;

        ThreadMessage msg;
        msg.dwMessage = TMSG_CALLBACK;
        msg.lpVoid = (void*)&callbackData;

        // wait for it.
        CApplicationMessenger::Get().SendMessage(msg, true);
    }

    return;
}
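The static trampoline referenced by callbackData.callback is not shown in this listing; a minimal sketch, assuming it is a static member that simply forwards the main-thread callback back to the decoder instance, would be:

// Hypothetical sketch of CallbackInitSurfaceTexture: the application
// messenger invokes it on the main thread and it re-enters
// InitSurfaceTexture() on the decoder passed in as userptr.
void CDVDVideoCodecHybris::CallbackInitSurfaceTexture(void *userptr)
{
    CDVDVideoCodecHybris *decoder = static_cast<CDVDVideoCodecHybris*>(userptr);
    if (decoder)
        decoder->InitSurfaceTexture();
}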
void on_frame_available()
{
    REPORT_FUNCTION();
    if (frame_available_cb)
    {
        OnFrameAvailable(frame_available_context);
    }
    else
        CLog::Log(LOGERROR, "%s::%s - %s", CLASSNAME, __func__,
                  "frame_available_cb is NULL, can't call frame_available_cb()");
}

void android_camera_set_video_size(CameraControl* control, int width, int height)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.setVideoSize(width, height);
    control->camera->setParameters(control->camera_parameters.flatten());
}

void CDVDMediaCodecInfo::GetTransformMatrix(float *textureMatrix)
{
    CSingleLock lock(m_section);
    REPORT_FUNCTION();

    if (!m_valid)
        return;

    //m_surfacetexture->getTransformMatrix(textureMatrix);
    gl_consumer_get_transformation_matrix(m_surfacetexture, textureMatrix);
}

static void on_frame_available_cb(GLConsumerWrapperHybris wrapper, void* context)
{
    REPORT_FUNCTION();
    if (context != NULL)
    {
        CDVDMediaCodecOnFrameAvailable* p = static_cast<CDVDMediaCodecOnFrameAvailable*>(context);
        p->on_frame_available();
    }
    else
        CLog::Log(LOGERROR, "%s::%s - %s", CLASSNAME, __func__,
                  "context is NULL, can't call on_frame_available()");
}

void android_camera_get_preview_texture_transformation(CameraControl* control, float m[16])
{
    REPORT_FUNCTION();
    assert(control);

    if (control->preview_texture == NULL)
        return;

    control->preview_texture->getTransformMatrix(m);
}

void CameraControl::postDataTimestamp(
    nsecs_t timestamp,
    int32_t msg_type,
    const android::sp<android::IMemory>& data)
{
    REPORT_FUNCTION();
    (void) timestamp;
    (void) msg_type;
    (void) data;
}

void android_camera_set_flash_mode(CameraControl* control, FlashMode mode)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.set(
        android::CameraParameters::KEY_FLASH_MODE,
        flash_modes[mode]);
    control->camera->setParameters(control->camera_parameters.flatten());
}

void android_camera_set_rotation(CameraControl* control, int rotation)
{
    REPORT_FUNCTION();
    assert(control);

    android::Mutex::Autolock al(control->guard);
    control->camera_parameters.set(
        android::CameraParameters::KEY_ROTATION,
        rotation);
    control->camera->setParameters(control->camera_parameters.flatten());
}