/* Example #1 */
/*
 * Input-event callback: drives the camera demo from hardware keys and
 * logs single-pointer touch up/down events.
 *
 * event   - the input event delivered by the input stack.
 * context - opaque pointer; must be the CameraControl* registered with
 *           the listener (asserted non-NULL below).
 */
void on_new_input_event(Event* event, void* context)
{
	assert(context);

	if (event->type == KEY_EVENT_TYPE && event->action == ISCL_KEY_EVENT_ACTION_UP) {
		printf("We got a key event: %d \n", event->details.key.key_code);

		CameraControl* cc = static_cast<CameraControl*>(context);

		switch(event->details.key.key_code) {
		case ISCL_KEYCODE_VOLUME_UP:
			printf("Starting video recording to /cache/test_recording.mp4\n");
			start_video_recording(cc);
			recording = true;
			break;
		case ISCL_KEYCODE_VOLUME_DOWN:
			printf("Stopping video recording\n");
			/* NOTE(review): stop uses the global `recorder` while start
			 * uses `cc` — confirm this asymmetry is intended. */
			stop_video_recording(recorder);
			recording = false;
			break;
		case ISCL_KEYCODE_POWER:
			printf("\tTaking a photo now.\n");
			android_camera_take_snapshot(cc);
			break;
		case ISCL_KEYCODE_HEADSETHOOK:
			printf("\tSwitching effect.\n");
			android_camera_set_effect_mode(cc, next_effect());
			break;
		default:
			/* Ignore all other keys. */
			break;
		}
	} else if (event->type == MOTION_EVENT_TYPE &&
			event->details.motion.pointer_count == 1) {
		/* Only single-finger gestures are reported; log touch up/down
		 * coordinates of the first (only) pointer. */
		if ((event->action & ISCL_MOTION_EVENT_ACTION_MASK) == ISCL_MOTION_EVENT_ACTION_UP) {
			printf("\tMotion event(Action up): (%f, %f) \n",
					event->details.motion.pointer_coordinates[0].x,
					event->details.motion.pointer_coordinates[0].y);
		}

		if ((event->action & ISCL_MOTION_EVENT_ACTION_MASK) == ISCL_MOTION_EVENT_ACTION_DOWN) {
			printf("\tMotion event(Action down): (%f, %f) \n",
					event->details.motion.pointer_coordinates[0].x,
					event->details.motion.pointer_coordinates[0].y);
		}
	}
}
/*
 * Camera API smoke test: connects to the back-facing camera, dumps and
 * queries a range of parameters, takes a snapshot, then spins waiting
 * for preview frames.  Returns 1 when the camera cannot be opened.
 */
int main(int argc, char** argv)
{
	(void) argc;	/* unused */
	(void) argv;	/* unused */

	CameraControlListener listener;
	memset(&listener, 0, sizeof(listener));
	listener.on_msg_error_cb = error_msg_cb;
	listener.on_msg_shutter_cb = shutter_msg_cb;
	listener.on_msg_focus_cb = autofocus_msg_cb;
	listener.on_msg_zoom_cb = zoom_msg_cb;

	listener.on_data_raw_image_cb = raw_data_cb;
	listener.on_data_compressed_image_cb = jpeg_data_cb;
	listener.on_preview_texture_needs_update_cb = preview_texture_needs_update_cb;
	CameraControl* cc = android_camera_connect_to(BACK_FACING_CAMERA_TYPE,
			&listener);

	if (cc == NULL) {
		printf("Problem connecting to camera\n");
		return 1;
	}

	listener.context = cc;

	//AndroidEventListener event_listener;
	//event_listener.on_new_event = on_new_input_event;
	//event_listener.context = cc;

	//InputStackConfiguration input_configuration = { true, 25000 };

	//android_input_stack_initialize(&event_listener, &input_configuration);
	//android_input_stack_start();

	android_camera_dump_parameters(cc);
	android_camera_enumerate_supported_picture_sizes(cc, size_cb, NULL);
	android_camera_enumerate_supported_preview_sizes(cc, size_cb, NULL);

	int min_fps, max_fps, current_fps;
	android_camera_get_preview_fps_range(cc, &min_fps, &max_fps);
	printf("Preview fps range: [%d,%d]\n", min_fps, max_fps);
	android_camera_get_preview_fps(cc, &current_fps);
	printf("Current preview fps range: %d\n", current_fps);

	android_camera_set_preview_size(cc, 960, 720);

	int width, height;
	android_camera_get_preview_size(cc, &width, &height);
	printf("Current preview size: [%d,%d]\n", width, height);
	android_camera_get_picture_size(cc, &width, &height);
	printf("Current picture size: [%d,%d]\n", width, height);
	/* Initialized to 0: the call that would populate it is disabled
	 * below, and printing an uninitialized int is undefined behavior. */
	int zoom = 0;
	//android_camera_get_current_zoom(cc, &zoom);
	printf("Current zoom: %d \n", zoom);
	android_camera_get_max_zoom(cc, &zoom);
	printf("Max zoom: %d \n", zoom);

	EffectMode effect_mode;
	FlashMode flash_mode;
	WhiteBalanceMode wb_mode;
	SceneMode scene_mode;
	AutoFocusMode af_mode;
	/* Zero-initialized for the same reason as `zoom`: the query that
	 * would set it is commented out, but the value is still printed. */
	CameraPixelFormat pixel_format = static_cast<CameraPixelFormat>(0);
	android_camera_get_effect_mode(cc, &effect_mode);
	android_camera_get_flash_mode(cc, &flash_mode);
	android_camera_get_white_balance_mode(cc, &wb_mode);
	android_camera_get_scene_mode(cc, &scene_mode);
	android_camera_get_auto_focus_mode(cc, &af_mode);
	//android_camera_get_preview_format(cc, &pixel_format);
	printf("Current effect mode: %d \n", effect_mode);
	printf("Current flash mode: %d \n", flash_mode);
	printf("Current wb mode: %d \n", wb_mode);
	printf("Current scene mode: %d \n", scene_mode);
	printf("Current af mode: %d \n", af_mode);
	printf("Current preview pixel format: %d \n", pixel_format);
	//android_camera_set_focus_region(cc, -200, -200, 200, 200, 300);

	/*
	ClientWithSurface cs = client_with_surface(true ); // Associate surface with egl.

	if (!cs.surface) {
		printf("Problem acquiring surface for preview");
		return 1;
	}

	EGLDisplay disp = sf_client_get_egl_display(cs.client);
	EGLSurface surface = sf_surface_get_egl_surface(cs.surface);

	RenderData render_data;
	GLuint preview_texture_id;
	glGenTextures(1, &preview_texture_id);
	glClearColor(1.0, 0., 0.5, 1.);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(
			GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(
			GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

	android_camera_set_preview_texture(cc, preview_texture_id);
	*/
	//android_camera_set_effect_mode(cc, EFFECT_MODE_SEPIA);
	//android_camera_set_flash_mode(cc, FLASH_MODE_AUTO);
	//android_camera_set_auto_focus_mode(cc, AUTO_FOCUS_MODE_CONTINUOUS_PICTURE);
	//android_camera_start_preview(cc);

	android_camera_take_snapshot(cc);

	//GLfloat transformation_matrix[16];
	//android_camera_get_preview_texture_transformation(cc, transformation_matrix);
	//glUniformMatrix4fv(render_data.matrix_loc, 1, GL_FALSE, transformation_matrix);

	printf("Started camera preview.\n");
	/* NOTE(review): busy-wait on a flag presumably set from the camera
	 * callback thread; the flag should be atomic (or this race is benign
	 * by design) — confirm against its declaration. */
	while (true) {
		if (new_camera_frame_available) {
			printf("New preview frame available\n");
			new_camera_frame_available = false;
		}
	}

	return 0;
}