Example #1
/* Client has queried disconnection from the camera.
 * Param:
 *  cc - Queried camera client descriptor.
 *  qc - Qemu client for the emulated camera.
 *  param - Query parameters. There are no parameters expected for this query.
 */
static void
_camera_client_query_disconnect(CameraClient* cc,
                                QemudClient* qc,
                                const char* param)
{
    if (cc->camera == NULL) {
        /* Already disconnected. */
        W("%s: Camera '%s' is already disconnected", __FUNCTION__, cc->device_name);
        _qemu_client_reply_ok(qc, "Camera is not connected");
        return;
    }

    /* Before we can go ahead and disconnect, we must make sure that camera is
     * not capturing frames. */
    if (cc->video_frame != NULL) {
        E("%s: Cannot disconnect camera '%s' while it is not stopped",
          __FUNCTION__, cc->device_name);
        _qemu_client_reply_ko(qc, "Camera is not stopped");
        return;
    }

    /* Close camera device. */
    camera_device_close(cc->camera);
    cc->camera = NULL;

    D("Camera device '%s' is now disconnected", cc->device_name);

    _qemu_client_reply_ok(qc, NULL);
}
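
The handler above only allows a disconnect while the client is idle: an already-disconnected client still gets an "ok" reply, a client that is still capturing gets a "ko" reply, and only otherwise is the device actually closed. The following is a minimal, self-contained sketch of that guard pattern; FakeClient, reply_ok()/reply_ko() and disconnect() are illustrative stand-ins, not the emulator's actual types or API.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for the emulator's CameraClient state. */
typedef struct {
    const char* device_name;
    void*       camera;       /* non-NULL while connected */
    void*       video_frame;  /* non-NULL while capturing */
} FakeClient;

/* Stubbed reply helpers; the real qemud reply format is not modeled here. */
static void reply_ok(const char* msg) { printf("ok: %s\n", msg ? msg : ""); }
static void reply_ko(const char* msg) { printf("ko: %s\n", msg ? msg : ""); }

/* Same guard ordering as _camera_client_query_disconnect() above:
 * already disconnected -> ok, still capturing -> ko, otherwise close. */
static void disconnect(FakeClient* cc)
{
    if (cc->camera == NULL) {
        reply_ok("Camera is not connected");
        return;
    }
    if (cc->video_frame != NULL) {
        reply_ko("Camera is not stopped");
        return;
    }
    free(cc->camera);          /* stands in for camera_device_close() */
    cc->camera = NULL;
    reply_ok(NULL);
}

int main(void)
{
    FakeClient cc = { "webcam0", malloc(1), NULL };
    disconnect(&cc);   /* closes the device and replies "ok" */
    disconnect(&cc);   /* already disconnected, still replies "ok" */
    return 0;
}
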
Example #2
int
enumerate_camera_devices(CameraInfo* cis, int max)
{
    char dev_name[24];
    int found = 0;
    int n;

    for (n = 0; n < max; n++) {
        CameraDevice* cd;

        sprintf(dev_name, "/dev/video%d", n);
        cd = camera_device_open(dev_name, 0);
        if (cd != NULL) {
            LinuxCameraDevice* lcd = (LinuxCameraDevice*)cd->opaque;
            if (!_camera_device_get_info(lcd, cis + found)) {
                char user_name[24];
                sprintf(user_name, "webcam%d", found);
                cis[found].display_name = ASTRDUP(user_name);
                cis[found].in_use = 0;
                found++;
            }
            camera_device_close(cd);
        } else {
            break;
        }
    }

    return found;
}
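
The Linux enumerator probes /dev/video0, /dev/video1, ... in order and stops at the first node that fails to open, so a gap in the numbering ends the scan. Below is a self-contained sketch of that probing loop; it uses plain open(2) instead of the emulator's camera_device_open(), does not query any V4L2 capabilities, and count_video_nodes() is a hypothetical name.

#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

/* Count consecutive /dev/videoN nodes that can be opened, starting at 0.
 * This mirrors the enumeration loop above, but only checks openability. */
static int count_video_nodes(int max)
{
    int n;
    for (n = 0; n < max; n++) {
        char dev_name[24];
        int fd;

        snprintf(dev_name, sizeof(dev_name), "/dev/video%d", n);
        fd = open(dev_name, O_RDWR | O_NONBLOCK);
        if (fd < 0) {
            break;  /* first missing node ends the enumeration */
        }
        close(fd);
    }
    return n;
}

int main(void)
{
    printf("found %d video node(s)\n", count_video_nodes(10));
    return 0;
}
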
Example #3
/* Frees emulated camera client descriptor. */
static void
_camera_client_free(CameraClient* cc)
{
    /* The only exception to the "read only" rule: we have to mark the camera
     * as being not used when we destroy a service for it. */
    if (cc->camera_info != NULL) {
        ((CameraInfo*)cc->camera_info)->in_use = 0;
    }
    if (cc->camera != NULL) {
        camera_device_close(cc->camera);
    }
    if (cc->video_frame != NULL) {
        free(cc->video_frame);
    }
    if (cc->device_name != NULL) {
        free(cc->device_name);
    }

    AFREE(cc);
}
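
_camera_client_free() has to cope with a client that failed part-way through construction: the shared CameraInfo slot is released first, and every owned field is freed only if it was actually set. A minimal, self-contained sketch of that tolerant teardown follows; Info, Client and client_free() are hypothetical stand-ins for the emulator's CameraInfo, CameraClient and AFREE()-based cleanup.

#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-ins for CameraInfo / CameraClient. */
typedef struct { int in_use; } Info;
typedef struct {
    Info* info;          /* borrowed; only the in_use flag is touched */
    void* device;        /* owned, may be NULL if connect never happened */
    void* frame;         /* owned, may be NULL if capture never started */
    char* name;          /* owned, may be NULL if allocation failed early */
} Client;

/* Safe on a partially-initialized client: every owned field is optional. */
static void client_free(Client* c)
{
    if (c == NULL) return;
    if (c->info != NULL) c->info->in_use = 0;  /* release the shared slot first */
    free(c->device);                           /* free(NULL) is a no-op */
    free(c->frame);
    free(c->name);
    free(c);
}

int main(void)
{
    Info info = { 1 };
    Client* c = calloc(1, sizeof(*c));
    if (c == NULL) return 1;
    c->info = &info;
    c->name = strdup("webcam0");   /* device/frame intentionally left NULL */
    client_free(c);                /* tolerates the missing fields */
    return 0;
}
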
/* Array containing emulated webcam frame dimensions.
 * capXxx API provides device independent frame dimensions, by scaling frames
 * received from the device to whatever dimensions were requested by the user.
 * So, we can just use a small set of frame dimensions to emulate.
 */
static const CameraFrameDim _emulate_dims[] =
{
  /* Emulates 640x480 frame. */
  {640, 480},
  /* Emulates 352x288 frame (required by camera framework). */
  {352, 288},
  /* Emulates 320x240 frame (required by camera framework). */
  {320, 240},
  /* Emulates 176x144 frame (required by camera framework). */
  {176, 144}
};

int
enumerate_camera_devices(CameraInfo* cis, int max)
{
    int inp_channel, found = 0;

    for (inp_channel = 0; inp_channel < 10 && found < max; inp_channel++) {
        char name[256];
        CameraDevice* cd;

        snprintf(name, sizeof(name), "%s%d", _default_window_name, found);
        cd = camera_device_open(name, inp_channel);
        if (cd != NULL) {
            WndCameraDevice* wcd = (WndCameraDevice*)cd->opaque;

            /* Unfortunately, on Windows we have to start capturing in order to get the
             * actual frame properties. */
            if (!camera_device_start_capturing(cd, V4L2_PIX_FMT_RGB32, 640, 480)) {
                cis[found].frame_sizes = (CameraFrameDim*)malloc(sizeof(_emulate_dims));
                if (cis[found].frame_sizes != NULL) {
                    char disp_name[24];
                    sprintf(disp_name, "webcam%d", found);
                    cis[found].display_name = ASTRDUP(disp_name);
                    cis[found].device_name = ASTRDUP(name);
                    cis[found].direction = ASTRDUP("front");
                    cis[found].inp_channel = inp_channel;
                    cis[found].frame_sizes_num = sizeof(_emulate_dims) / sizeof(*_emulate_dims);
                    memcpy(cis[found].frame_sizes, _emulate_dims, sizeof(_emulate_dims));
                    cis[found].pixel_format = wcd->pixel_format;
                    cis[found].in_use = 0;
                    found++;
                } else {
                    E("%s: Unable to allocate dimensions", __FUNCTION__);
                }
                camera_device_stop_capturing(cd);
            } else {
                /* Could not start capturing, so this camera's properties
                 * can't be queried; stop the enumeration. */
                camera_device_close(cd);
                break;
            }
            camera_device_close(cd);
        } else {
            /* No more cameras. */
            break;
        }
    }

    return found;
}
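
Because the Windows capXxx API scales captured frames to whatever size the caller requests, the enumerator above advertises the same fixed _emulate_dims table for every detected camera: it mallocs a buffer of sizeof(_emulate_dims), memcpys the table into it, and derives the element count with sizeof(_emulate_dims) / sizeof(*_emulate_dims). A self-contained sketch of that copy-a-static-table idiom is below; FrameDim and copy_emulated_dims() are local stand-ins, not the emulator's CameraFrameDim API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Local stand-in for the emulator's CameraFrameDim. */
typedef struct { int width, height; } FrameDim;

static const FrameDim _emulate_dims[] = {
    {640, 480}, {352, 288}, {320, 240}, {176, 144}
};

/* Give the caller an owned copy of the fixed dimension table, as the
 * Windows enumerator does for each detected camera. */
static FrameDim* copy_emulated_dims(int* count)
{
    FrameDim* dims = malloc(sizeof(_emulate_dims));
    if (dims == NULL) {
        *count = 0;
        return NULL;
    }
    memcpy(dims, _emulate_dims, sizeof(_emulate_dims));
    *count = (int)(sizeof(_emulate_dims) / sizeof(*_emulate_dims));
    return dims;
}

int main(void)
{
    int n, i;
    FrameDim* dims = copy_emulated_dims(&n);
    for (i = 0; i < n; i++) {
        printf("%dx%d\n", dims[i].width, dims[i].height);
    }
    free(dims);
    return 0;
}
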