/*
 * Filter callback: pull an image from the previous filter in ProPhoto
 * colorspace and run the DCP (camera profile) transform over it, possibly
 * split across several worker threads.
 *
 * @filter:  this RSDcp filter instance.
 * @request: the incoming filter request; its ROI (if any) limits the area
 *           that is processed.
 *
 * Returns: a new RSFilterResponse holding the processed image (caller owns
 * the reference). The response is always tagged with the ProPhoto colorspace.
 */
static RSFilterResponse *
get_image(RSFilter *filter, const RSFilterRequest *request)
{
	RSDcp *dcp = RS_DCP(filter);
	RSDcpClass *klass = RS_DCP_GET_CLASS(dcp);
	GdkRectangle *roi;
	RSFilterResponse *previous_response;
	RSFilterResponse *response;
	RS_IMAGE16 *input;
	RS_IMAGE16 *output;
	RS_IMAGE16 *tmp;
	gint j;

	/* Clone the request so we can attach our own parameters without
	 * disturbing the caller's request object. */
	RSFilterRequest *request_clone = rs_filter_request_clone(request);

	/* Without a DCP profile, forward the raw pre-multipliers so the
	 * upstream filter can apply white balance itself. */
	if (!dcp->use_profile)
	{
		gfloat premul[4] = {dcp->pre_mul.x, dcp->pre_mul.y, dcp->pre_mul.z, 1.0};
		rs_filter_param_set_float4(RS_FILTER_PARAM(request_clone), "premul", premul);
	}

	/* Ask the previous filter to deliver in ProPhoto. */
	rs_filter_param_set_object(RS_FILTER_PARAM(request_clone), "colorspace", klass->prophoto);
	previous_response = rs_filter_get_image(filter->previous, request_clone);
	g_object_unref(request_clone);

	/* No usable upstream filter or no image: pass the response through
	 * untouched (ownership transfers to the caller). */
	if (!RS_IS_FILTER(filter->previous))
		return previous_response;

	input = rs_filter_response_get_image(previous_response);
	if (!input)
		return previous_response;

	response = rs_filter_response_clone(previous_response);

	/* We always deliver in ProPhoto */
	rs_filter_param_set_object(RS_FILTER_PARAM(response), "colorspace", klass->prophoto);

	g_object_unref(previous_response);

	if ((roi = rs_filter_request_get_roi(request)))
	{
		/* Align so we start at even pixel counts */
		/* NOTE(review): this mutates the ROI rectangle owned by the
		 * caller's request in place — presumably intentional, but
		 * verify no other consumer of the request relies on the
		 * original x/width. Only width is clamped to the image;
		 * height is assumed valid — TODO confirm. */
		roi->width += (roi->x&1);
		roi->x -= (roi->x&1);
		roi->width = MIN(input->w - roi->x, roi->width);

		/* Shallow copy of the input, then a subframe view over the ROI;
		 * blit the ROI pixels in so only that area is fresh data. */
		output = rs_image16_copy(input, FALSE);
		tmp = rs_image16_new_subframe(output, roi);
		bit_blt((char*)GET_PIXEL(tmp,0,0), tmp->rowstride * 2,
			(const char*)GET_PIXEL(input,roi->x,roi->y), input->rowstride * 2,
			tmp->w * tmp->pixelsize * 2, tmp->h);
	}
	else
	{
		/* No ROI: deep-copy the whole image and process all of it. */
		output = rs_image16_copy(input, TRUE);
		tmp = g_object_ref(output);
	}

	g_object_unref(input);
	rs_filter_response_set_image(response, output);
	g_object_unref(output);

	/* Serialize against settings changes while the workers run. */
	g_static_rec_mutex_lock(&dcp_mutex);
	init_exposure(dcp);

	guint i, y_offset, y_per_thread, threaded_h;
	guint threads = rs_get_number_of_processor_cores();
	/* Small images are not worth the thread start-up cost. */
	if (tmp->h * tmp->w < 200*200)
		threads = 1;
	ThreadInfo *t = g_new(ThreadInfo, threads);
	threaded_h = tmp->h;
	/* Split rows evenly across threads (last slice clamped below). */
	y_per_thread = (threaded_h + threads-1)/threads;
	y_offset = 0;

	for (i = 0; i < threads; i++)
	{
		t[i].tmp = tmp;
		t[i].start_y = y_offset;
		t[i].start_x = 0;
		t[i].dcp = dcp;
		y_offset += y_per_thread;
		y_offset = MIN(tmp->h, y_offset);
		t[i].end_y = y_offset;
		/* Per-thread histogram buckets, merged after the join. */
		for(j = 0; j < 256; j++)
			t[i].curve_input_values[j] = 0;
		t[i].single_thread = (threads == 1);
		if (threads == 1)
			/* Single-threaded: run inline, no GThread created. */
			start_single_dcp_thread(&t[0]);
		else
			t[i].threadid = g_thread_create(start_single_dcp_thread, &t[i], TRUE, NULL);
	}

	/* Wait for threads to finish */
	/* (threadid is only valid when threads > 1, hence the guard.) */
	for(i = 0; threads > 1 && i < threads; i++)
		g_thread_join(t[i].threadid);

	/* Settings can change now */
	g_static_rec_mutex_unlock(&dcp_mutex);

	/* If we must deliver histogram data, do it now */
	if (dcp->read_out_curve)
	{
		gint *values = g_malloc0(256*sizeof(gint));
		for(i = 0; i < threads; i++)
			for(j = 0; j < 256; j++)
				values[j] += t[i].curve_input_values[j];
		rs_curve_set_histogram_data(RS_CURVE_WIDGET(dcp->read_out_curve), values);
		g_free(values);
	}

	g_free(t);
	g_object_unref(tmp);

	return response;
}
//*********************************************************************************** static void init_device (void) { struct v4l2_capability cap; struct v4l2_cropcap cropcap; struct v4l2_crop crop; struct v4l2_format fmt; unsigned int min; if (-1 == xioctl (fd, VIDIOC_QUERYCAP, &cap)) { if (EINVAL == errno) { fprintf (stderr, "%s is no V4L2 device\n", dev_name); exit (EXIT_FAILURE); } else { errno_exit ("VIDIOC_QUERYCAP"); } } else { printf ("Caps returns: 0x%x\n", cap.capabilities); } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { fprintf (stderr, "%s is no video capture device\n", dev_name); exit (EXIT_FAILURE); } switch (io) { case IO_METHOD_READ: if (!(cap.capabilities & V4L2_CAP_READWRITE)) { fprintf (stderr, "%s does not support read i/o\n", dev_name); //exit (EXIT_FAILURE); } break; case IO_METHOD_MMAP: case IO_METHOD_USERPTR: if (!(cap.capabilities & V4L2_CAP_STREAMING)) { fprintf (stderr, "%s does not support streaming i/o\n", dev_name); exit (EXIT_FAILURE); } break; case IO_METHOD_SETEXPOSURE: puts("I am in expo"); init_exposure(); break; case IO_METHOD_SETGAIN : puts("I am in gain"); init_gain(); break; } /* Select video input, video standard and tune here. */ CLEAR (cropcap); cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (0 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; crop.c = cropcap.defrect; /* reset to default */ if (-1 == xioctl (fd, VIDIOC_S_CROP, &crop)) { switch (errno) { case EINVAL: /* Cropping not supported. */ fprintf(stderr, " Cropping not supported\n"); break; default: /* Errors ignored. */ break; } } } else { /* Errors ignored. 
*/ } CLEAR (fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = TARGETWIDTH; fmt.fmt.pix.height = TARGETHEIGHT; fmt.fmt.pix.pixelformat = PIXELFMT; // defined at the top of the file printf("capture: size: W - %d H - %d, format: 0x%x\n", fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.pixelformat); fmt.fmt.pix.field = V4L2_FIELD_NONE; #if 1 if (-1 == xioctl (fd, VIDIOC_S_FMT, &fmt)) { printf("xioctl(VIDIOC_S_FMT) failed--->It's doesn't matter. Continue..."); } else { printf("VIDIOC_S_FMT returned success\n"); printf(" returned: pix.width: %d pix.height: %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); } //;jr;$* exit for now // printf("EXIT APPLICATION for now......\n"); // exit(0); #endif /* Note VIDIOC_S_FMT may change width and height. */ /* Buggy driver paranoia. */ min = fmt.fmt.pix.width * 2; if (fmt.fmt.pix.bytesperline < min) fmt.fmt.pix.bytesperline = min; min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; if (fmt.fmt.pix.sizeimage < min) fmt.fmt.pix.sizeimage = min; switch (io) { case IO_METHOD_READ: init_read (fmt.fmt.pix.sizeimage); break; case IO_METHOD_MMAP: init_mmap (); break; case IO_METHOD_USERPTR: init_userp (fmt.fmt.pix.sizeimage); break; case IO_METHOD_SETEXPOSURE: break; } }