void CameraTester::connectToCamera()
{
    emit logHappened("connecting");

    // Not sure why it's needed, but the test example does this nullification.
    memset(&m_listener, 0, sizeof(m_listener));

    m_cc = android_camera_connect_to(BACK_FACING_CAMERA_TYPE, &m_listener);
    if (m_cc == NULL) {
        emit logHappened("Problem connecting to camera");
    } else {
        emit logHappened("connected");
    }

    m_listener.context = m_cc;
}
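As written, connectToCamera() only zeroes m_listener; none of the callback fields that the test programs below rely on (on_msg_error_cb, on_data_compressed_image_cb, and so on) are ever assigned, so nothing will be called back. Below is a minimal sketch of wiring up two of the callbacks before connecting. The handler signatures are assumptions inferred from how the test code uses them (check the compatibility-layer header for the real ones), and the printf bodies are placeholders:

// Sketch only: assumed handler signatures, placeholder bodies.
#include <cstdio>
#include <cstdint>
#include <cstring>
// plus the CameraTester class header and the camera compatibility-layer header,
// as in the original snippet.

static void on_camera_error(void* context)  // assumed: context-only callback
{
    (void) context;
    printf("camera reported an error\n");
}

static void on_jpeg_ready(void* data, uint32_t data_size, void* context)  // assumed signature
{
    (void) data;
    (void) context;
    printf("received compressed image, %u bytes\n", (unsigned) data_size);
}

void CameraTester::connectToCamera()
{
    emit logHappened("connecting");

    memset(&m_listener, 0, sizeof(m_listener));
    m_listener.on_msg_error_cb = on_camera_error;
    m_listener.on_data_compressed_image_cb = on_jpeg_ready;
    // ...assign the remaining callbacks (shutter, focus, zoom, raw data,
    // preview texture update) the same way before connecting...

    m_cc = android_camera_connect_to(BACK_FACING_CAMERA_TYPE, &m_listener);
    if (m_cc == NULL) {
        emit logHappened("Problem connecting to camera");
        return;
    }

    m_listener.context = m_cc;
    emit logHappened("connected");
}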
int main(int argc, char** argv)
{
    CameraControlListener listener;
    memset(&listener, 0, sizeof(listener));
    listener.on_msg_error_cb = error_msg_cb;
    listener.on_msg_shutter_cb = shutter_msg_cb;
    listener.on_msg_focus_cb = autofocus_msg_cb;
    listener.on_msg_zoom_cb = zoom_msg_cb;

    listener.on_data_raw_image_cb = raw_data_cb;
    listener.on_data_compressed_image_cb = jpeg_data_cb;
    listener.on_preview_texture_needs_update_cb = preview_texture_needs_update_cb;

    CameraControl* cc = android_camera_connect_to(BACK_FACING_CAMERA_TYPE, &listener);
    if (cc == NULL) {
        printf("Problem connecting to camera\n");
        return 1;
    }
    listener.context = cc;

    //AndroidEventListener event_listener;
    //event_listener.on_new_event = on_new_input_event;
    //event_listener.context = cc;

    //InputStackConfiguration input_configuration = { true, 25000 };

    //android_input_stack_initialize(&event_listener, &input_configuration);
    //android_input_stack_start();

    android_camera_dump_parameters(cc);
    android_camera_enumerate_supported_picture_sizes(cc, size_cb, NULL);
    android_camera_enumerate_supported_preview_sizes(cc, size_cb, NULL);

    int min_fps, max_fps, current_fps;
    android_camera_get_preview_fps_range(cc, &min_fps, &max_fps);
    printf("Preview fps range: [%d,%d]\n", min_fps, max_fps);
    android_camera_get_preview_fps(cc, &current_fps);
    printf("Current preview fps: %d\n", current_fps);

    android_camera_set_preview_size(cc, 960, 720);

    int width, height;
    android_camera_get_preview_size(cc, &width, &height);
    printf("Current preview size: [%d,%d]\n", width, height);

    android_camera_get_picture_size(cc, &width, &height);
    printf("Current picture size: [%d,%d]\n", width, height);

    int zoom = 0; // initialised: get_current_zoom below is disabled
    //android_camera_get_current_zoom(cc, &zoom);
    printf("Current zoom: %d \n", zoom);
    android_camera_get_max_zoom(cc, &zoom);
    printf("Max zoom: %d \n", zoom);

    EffectMode effect_mode;
    FlashMode flash_mode;
    WhiteBalanceMode wb_mode;
    SceneMode scene_mode;
    AutoFocusMode af_mode;
    CameraPixelFormat pixel_format;
    android_camera_get_effect_mode(cc, &effect_mode);
    android_camera_get_flash_mode(cc, &flash_mode);
    android_camera_get_white_balance_mode(cc, &wb_mode);
    android_camera_get_scene_mode(cc, &scene_mode);
    android_camera_get_auto_focus_mode(cc, &af_mode);
    //android_camera_get_preview_format(cc, &pixel_format);

    printf("Current effect mode: %d \n", effect_mode);
    printf("Current flash mode: %d \n", flash_mode);
    printf("Current wb mode: %d \n", wb_mode);
    printf("Current scene mode: %d \n", scene_mode);
    printf("Current af mode: %d \n", af_mode);
    // Note: pixel_format is indeterminate here because get_preview_format is disabled.
    printf("Current preview pixel format: %d \n", pixel_format);
    //android_camera_set_focus_region(cc, -200, -200, 200, 200, 300);

    /*
    ClientWithSurface cs = client_with_surface(true); // Associate surface with egl.

    if (!cs.surface) {
        printf("Problem acquiring surface for preview\n");
        return 1;
    }

    EGLDisplay disp = sf_client_get_egl_display(cs.client);
    EGLSurface surface = sf_surface_get_egl_surface(cs.surface);

    RenderData render_data;
    GLuint preview_texture_id;
    glGenTextures(1, &preview_texture_id);
    glClearColor(1.0, 0., 0.5, 1.);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    android_camera_set_preview_texture(cc, preview_texture_id);
    */

    //android_camera_set_effect_mode(cc, EFFECT_MODE_SEPIA);
    //android_camera_set_flash_mode(cc, FLASH_MODE_AUTO);
    //android_camera_set_auto_focus_mode(cc, AUTO_FOCUS_MODE_CONTINUOUS_PICTURE);

    //android_camera_start_preview(cc);
    android_camera_take_snapshot(cc);

    //GLfloat transformation_matrix[16];
    //android_camera_get_preview_texture_transformation(cc, transformation_matrix);
    //glUniformMatrix4fv(render_data.matrix_loc, 1, GL_FALSE, transformation_matrix);

    printf("Started camera preview.\n");

    while (true) {
        //printf("loop\n");
        if (new_camera_frame_available) {
            printf("New preview frame available\n");
            new_camera_frame_available = false;
        }
    }

    return 0;
}
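The test program above references a number of handlers (error_msg_cb, shutter_msg_cb, autofocus_msg_cb, zoom_msg_cb, raw_data_cb, jpeg_data_cb, preview_texture_needs_update_cb, size_cb) and a new_camera_frame_available flag that are defined elsewhere. A rough sketch of what they could look like follows; only the names come from the code above, the parameter lists are assumptions, and the flag is left unsynchronised even though the callbacks fire on a different thread than the polling loop:

// Sketch of the handlers the test program assigns; signatures are assumed.
#include <cstdio>
#include <cstdint>

bool new_camera_frame_available = false;

static void error_msg_cb(void* context)     { (void) context; printf("%s\n", __PRETTY_FUNCTION__); }
static void shutter_msg_cb(void* context)   { (void) context; printf("%s\n", __PRETTY_FUNCTION__); }
static void autofocus_msg_cb(void* context) { (void) context; printf("%s\n", __PRETTY_FUNCTION__); }
static void zoom_msg_cb(void* context)      { (void) context; printf("%s\n", __PRETTY_FUNCTION__); }

// Assumed: the image callbacks hand over a buffer and its size.
static void raw_data_cb(void* data, uint32_t data_size, void* context)
{
    (void) data;
    (void) context;
    printf("raw frame: %u bytes\n", (unsigned) data_size);
}

static void jpeg_data_cb(void* data, uint32_t data_size, void* context)
{
    (void) data;
    (void) context;
    printf("jpeg: %u bytes\n", (unsigned) data_size);
    // The snapshot taken by android_camera_take_snapshot() would be saved here.
}

// Sets the flag that the main loop polls.
static void preview_texture_needs_update_cb(void* context)
{
    (void) context;
    new_camera_frame_available = true;
}

// Passed to the android_camera_enumerate_supported_*_sizes() calls.
static void size_cb(void* context, int width, int height)
{
    (void) context;
    printf("Supported size: [%d,%d]\n", width, height);
}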
int main(int argc, char** argv)
{
    CameraControlListener listener;
    memset(&listener, 0, sizeof(listener));
    listener.on_msg_error_cb = error_msg_cb;
    listener.on_msg_shutter_cb = shutter_msg_cb;
    listener.on_msg_focus_cb = autofocus_msg_cb;
    listener.on_msg_zoom_cb = zoom_msg_cb;

    listener.on_data_raw_image_cb = raw_data_cb;
    listener.on_data_compressed_image_cb = jpeg_data_cb;
    listener.on_preview_texture_needs_update_cb = preview_texture_needs_update_cb;

    camera_control = android_camera_connect_to(BACK_FACING_CAMERA_TYPE, &listener);
    if (camera_control == NULL) {
        printf("Problem connecting to camera\n");
        return 1;
    }
    listener.context = camera_control;

    AndroidEventListener event_listener;
    event_listener.on_new_event = on_new_input_event;
    event_listener.context = camera_control;

    InputStackConfiguration input_configuration = {
        enable_touch_point_visualization : true,
        default_layer_for_touch_point_visualization : 10000,
        input_area_width : 1024,
        input_area_height : 1024
    };

    android_input_stack_initialize(&event_listener, &input_configuration);
    android_input_stack_start();

    // Set the still photo size
    android_camera_enumerate_supported_picture_sizes(camera_control, size_cb, NULL);
    if (camera_width == 0 && camera_height == 0) {
        camera_width = 320;
        camera_height = 240;
    }
    android_camera_set_picture_size(camera_control, camera_width, camera_height);

    // Set the still photo thumbnail size
    android_camera_enumerate_supported_thumbnail_sizes(camera_control, thumbnail_size_cb, NULL);
    if (thumbnail_width == 0 && thumbnail_height == 0) {
        thumbnail_width = 320;
        thumbnail_height = 240;
    }
    android_camera_set_thumbnail_size(camera_control, thumbnail_width, thumbnail_height);

    AutoFocusMode af_mode;
    android_camera_get_auto_focus_mode(camera_control, &af_mode);
    printf("Current af mode: %d \n", af_mode);

    int zoom;
    android_camera_set_zoom(camera_control, 0);
    android_camera_get_max_zoom(camera_control, &zoom);
    printf("Max zoom: %d \n", zoom);

    android_camera_enumerate_supported_video_sizes(camera_control, size_cb, NULL);
    android_camera_enumerate_supported_preview_sizes(camera_control, size_cb, NULL);
    android_camera_set_preview_size(camera_control, camera_width, camera_height);

    int min_fps, max_fps, current_fps;
    android_camera_get_preview_fps_range(camera_control, &min_fps, &max_fps);
    printf("Preview fps range: [%d,%d]\n", min_fps, max_fps);
    android_camera_get_preview_fps(camera_control, &current_fps);
    printf("Current preview fps: %d\n", current_fps);

#if 0
    android_camera_dump_parameters(camera_control);
    android_camera_set_display_orientation(camera_control, 90);

    int width, height;
    android_camera_get_preview_size(camera_control, &width, &height);
    printf("Current preview size: [%d,%d]\n", width, height);

    android_camera_get_picture_size(camera_control, &width, &height);
    printf("Current picture size: [%d,%d]\n", width, height);

    android_camera_get_current_zoom(camera_control, &zoom);
    printf("Current zoom: %d \n", zoom);

    EffectMode effect_mode;
    FlashMode flash_mode;
    WhiteBalanceMode wb_mode;
    //SceneMode scene_mode;
    CameraPixelFormat pixel_format;

    android_camera_get_effect_mode(camera_control, &effect_mode);
    printf("Current effect mode: %d \n", effect_mode);
    android_camera_get_flash_mode(camera_control, &flash_mode);
    printf("Current flash mode: %d \n", flash_mode);
    android_camera_get_white_balance_mode(camera_control, &wb_mode);
    ALOGD("Current wb mode: %d \n", wb_mode);
#if 0 // Disabled, causes the test app to crash
    android_camera_get_scene_mode(camera_control, &scene_mode);
    printf("Current scene mode: %d \n", scene_mode);
#endif

    android_camera_get_preview_format(camera_control, &pixel_format);
    printf("Current preview pixel format: %d \n", pixel_format);
    //android_camera_set_focus_region(camera_control, -200, -200, 200, 200, 300);
#endif

    printf("Creating client with surface\n");
    ClientWithSurface cs = client_with_surface(true /* Associate surface with egl. */);
    if (!cs.surface) {
        printf("Problem acquiring surface for preview\n");
        return 1;
    }
    printf("Finished creating client with surface\n");

    disp = sf_client_get_egl_display(cs.client);
    surface = sf_surface_get_egl_surface(cs.surface);

    GLuint preview_texture_id;
    printf("Getting a texture id\n");
    glGenTextures(1, &preview_texture_id);
    glClearColor(1.0, 0., 0.5, 1.);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    printf("About to set preview texture\n");
    android_camera_set_preview_texture(camera_control, preview_texture_id);

#if 0
    android_camera_set_effect_mode(camera_control, EFFECT_MODE_SEPIA);
    android_camera_set_flash_mode(camera_control, FLASH_MODE_AUTO);
    android_camera_set_auto_focus_mode(camera_control, AUTO_FOCUS_MODE_CONTINUOUS_PICTURE);
#endif

    android_camera_start_preview(camera_control);

    GLfloat transformation_matrix[16];
    android_camera_get_preview_texture_transformation(camera_control, transformation_matrix);
    glUniformMatrix4fv(render_data.matrix_loc, 1, GL_FALSE, transformation_matrix);

    printf("Started camera preview.\n");

    while (1) {
        usleep(50);
    }

    // NB: the loop above never exits as written, so this cleanup is unreachable.
    stop_video_recording(recorder);
    android_camera_stop_preview(camera_control);
    android_camera_disconnect(camera_control);
}
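This second program also leans on file-scope state that is not shown: camera_control, disp, surface, render_data, recorder, and the camera_width/camera_height and thumbnail_width/thumbnail_height pairs that the fallback checks above expect the enumeration callbacks to have filled in. The sketch below covers just the size-related pieces; the "remember the largest enumerated size" policy and the callback signatures are assumptions, not necessarily what the real test app does:

// Sketch of the size-related globals and callbacks (assumed; not shown in the original).
#include <cstdio>

static int camera_width = 0, camera_height = 0;        // consumed by set_picture_size / set_preview_size
static int thumbnail_width = 0, thumbnail_height = 0;  // consumed by set_thumbnail_size

// Assumed policy: keep the largest size reported during enumeration, so the
// 320x240 fallback only applies when nothing was reported at all.
static void size_cb(void* context, int width, int height)
{
    (void) context;
    printf("Supported size: [%d,%d]\n", width, height);
    if (width * height > camera_width * camera_height) {
        camera_width = width;
        camera_height = height;
    }
}

static void thumbnail_size_cb(void* context, int width, int height)
{
    (void) context;
    printf("Supported thumbnail size: [%d,%d]\n", width, height);
    if (width * height > thumbnail_width * thumbnail_height) {
        thumbnail_width = width;
        thumbnail_height = height;
    }
}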