/*
 * Video capture loop (thread entry point).
 *
 * Repeatedly grabs a frame from the camera, copies the Y plane (full
 * size) and the Cb/Cr planes (half size) into both the overlay buffer
 * and bufCopy for other consumers, then releases the frame back to the
 * driver. Loops until the global stopflag is set, then stops and
 * closes the camera.
 *
 * Returns NULL (the thread result is unused).
 *
 * Fixes vs. previous version: removed the unused `cnt` counter and the
 * unused `VideoCopy black` local, and added the missing return
 * statement (falling off the end of a non-void function is undefined
 * behavior if the value is read, e.g. by pthread_join).
 */
void* videoFrame(void)
{
    while (1) {
        vidbuf = camera_get_frame(fdCamera);

        /* Luma plane is len_vidbuf bytes; chroma planes are subsampled
         * to half that size. */
        memcpy(vidbuf_overlay.ycbcr.y,  vidbuf->ycbcr.y,  len_vidbuf);
        memcpy(vidbuf_overlay.ycbcr.cb, vidbuf->ycbcr.cb, len_vidbuf / 2);
        memcpy(vidbuf_overlay.ycbcr.cr, vidbuf->ycbcr.cr, len_vidbuf / 2);
        memcpy(bufCopy.ycbcr.y,  vidbuf->ycbcr.y,  len_vidbuf);
        memcpy(bufCopy.ycbcr.cb, vidbuf->ycbcr.cb, len_vidbuf / 2);
        memcpy(bufCopy.ycbcr.cr, vidbuf->ycbcr.cr, len_vidbuf / 2);

        camera_release_frame(fdCamera, vidbuf);

        if (stopflag == 1)
            break;
    }

    camera_stop(fdCamera);
    camera_close(fdCamera);
    return NULL;
}
/* NOTE(review): this definition is truncated in this chunk — it ends inside the
 * `else if (width == 320)` branch with no closing braces. The remainder
 * presumably mirrors the 160-wide branch for QVGA (height/header/register
 * table) — confirm against the full file before relying on this. */
/* Refactored out, code to reset the camera after a frame size change. */ void camera_reset (unsigned int width) { imgWidth = width; if (width == 160) { imgHeight = 128; strcpy1(imgHead, "##IMJ3"); camera_stop(); i2cwrite(0x30, ov9655_qqvga, sizeof(ov9655_qqvga)>>1); } else if (width == 320) {
static int camera_exit(struct camera * dev) { camera_stop(dev); //解映射 int numBufs ; for( numBufs = 0; numBufs < dev->v4l_info.v4l_num; numBufs++) munmap(dev->v4l_info.video_buf[numBufs].start, dev->v4l_info.video_buf[numBufs].length); free(dev->v4l_info.video_buf); close(dev->fd); printf("camera closed...\n"); return 0; }
/*
 * Restart the camera pipeline: stop any recording in progress, mark
 * the circular buffer as restarting, commit the pending camera
 * adjustments, then cycle the camera off and back on.
 */
void camera_restart(void)
{
    VideoCircularBuffer *buf = &video_circular_buffer;

    pthread_mutex_lock(&buf->mutex);
    video_record_stop(buf);
    buf->state = VCB_STATE_RESTARTING;
    pikrellcam.camera_adjust = camera_adjust_temp;	/* May not be changed */
    pthread_mutex_unlock(&buf->mutex);

    camera_stop();
    camera_start();
}
void endProgram(void) { int i, cntdown = 3; // 정지 command =26; for (i = 0; i < cntdown; i++) { usleep(COMMAND_DELAY); // delay write(fdBackBoard, &command, 1); printf("command: %d\n", command); } // 옆으로 피하는 코드 추가 필요 camera_stop(fdCamera); camera_close(fdCamera); }
void MainWindow::createActions() { openAction = new QAction(tr("&open..."),this); connect(openAction,SIGNAL(triggered()),this,SLOT(open())); saveAction = new QAction(tr("&save..."),this); connect(saveAction,SIGNAL(triggered()),this,SLOT(save())); exitAction = new QAction(tr("&exit"), this); connect(exitAction, SIGNAL(triggered()),this,SLOT(exit())); facedetectAction = new QAction(tr("&Face Detect"),this); connect(facedetectAction, SIGNAL(triggered()),this,SLOT(facedetect())); powertfAction = new QAction(tr("&Power Transform"),this); connect(powertfAction, SIGNAL(triggered()),this,SLOT(powertf())); edgedetectAction = new QAction(tr("&Edge Detect"),this); connect(edgedetectAction, SIGNAL(triggered()),this,SLOT(edgedetect())); houghAction = new QAction(tr("&Hough Check"),this); connect(houghAction, SIGNAL(triggered()), this, SLOT(houghcheck())); morphAction = new QAction(tr("&Morphology"),this); connect(morphAction, SIGNAL(triggered()), this, SLOT(morphology())); blurAction = new QAction(tr("&Blur"),this); connect(blurAction, SIGNAL(triggered()), this, SLOT(bluropt())); histeqAction = new QAction(tr("&Histeq"),this); connect(histeqAction, SIGNAL(triggered()), this, SLOT(histeqopt())); aboutAction = new QAction(tr("&about"),this); connect(aboutAction, SIGNAL(triggered()),this,SLOT(about())); docAction = new QAction(tr("&doc"),this); connect(docAction, SIGNAL(triggered()),this,SLOT(doc())); backwardAction = new QAction(tr("&backward"),this); backwardAction->setIcon(QIcon(":/images/backward.png")); connect(backwardAction,SIGNAL(triggered()), this, SLOT(backward())); forwardAction = new QAction(tr("&forward"),this); forwardAction->setIcon(QIcon(":/images/forward.png")); connect(forwardAction,SIGNAL(triggered()), this, SLOT(forward())); camstartAction = new QAction(tr("&CAM_Start"), this); camstartAction->setIcon(QIcon(":/images/Start.png")); camstartAction->setEnabled(true); connect(camstartAction,SIGNAL(triggered()),this,SLOT(camera_start())); camstopAction = new 
QAction(tr("&CAM_Stop"), this); camstopAction->setIcon(QIcon(":/images/Stop.png")); camstopAction->setEnabled(false); connect(camstopAction,SIGNAL(triggered()),this,SLOT(camera_stop())); }
void* videoFrame(void){ int cnt = 0; while(1){ vidbuf = camera_get_frame(fdCamera); memcpy(vidbuf_overlay.ycbcr.y,vidbuf->ycbcr.y,len_vidbuf); memcpy(vidbuf_overlay.ycbcr.cb,vidbuf->ycbcr.cb,len_vidbuf/2); memcpy(vidbuf_overlay.ycbcr.cr,vidbuf->ycbcr.cr,len_vidbuf/2); memcpy(bufCopy.ycbcr.y,vidbuf->ycbcr.y,len_vidbuf); memcpy(bufCopy.ycbcr.cb,vidbuf->ycbcr.cb,len_vidbuf/2); memcpy(bufCopy.ycbcr.cr,vidbuf->ycbcr.cr,len_vidbuf/2); camera_release_frame(fdCamera,vidbuf); if(stopFlag == 1) break; /* if(obj.x_point >=0 && obj.x_point <320 && obj.y_point >=0 && obj.y_point < 240){ int i, j; for(i = obj.x_point; i<obj.x_point + 10; i++){ for(j = obj.y_point; j<obj.y_point + 10; j++){ int index = j*320 + i; vidbuf_overlay.ycbcr.y[index] = 120; vidbuf_overlay.ycbcr.cb[index/2] = 230; vidbuf_overlay.ycbcr.cr[index/2] = 30; } } } */ } camera_stop(fdCamera); camera_close(fdCamera); }
/* camera_delete
 *
 * Full teardown of a Camera object. Releases resources in a fixed
 * order: capture state, child windows, overlays, the GLib IO channel,
 * GDK drawing resources, the device fd, the RGB conversion buffer,
 * the mmap'ed capture buffers, the raw buffer, the device-name string
 * and finally the object itself. The order is deliberate — later
 * steps free storage earlier steps may still reference — so do not
 * reorder. */
void camera_delete(Camera * camera)
{
	size_t i;

	camera_stop(camera);
	/* Destroy the pause/resume child windows if they were created. */
	if(camera->pp_window != NULL)
		gtk_widget_destroy(camera->pp_window);
	if(camera->pr_window != NULL)
		gtk_widget_destroy(camera->pr_window);
	for(i = 0; i < camera->overlays_cnt; i++)
		cameraoverlay_delete(camera->overlays[i]);
	free(camera->overlays);
	if(camera->channel != NULL)
	{
		/* XXX we ignore errors at this point */
		g_io_channel_shutdown(camera->channel, TRUE, NULL);
		g_io_channel_unref(camera->channel);
	}
	if(camera->pixmap != NULL)
		g_object_unref(camera->pixmap);
	if(camera->gc != NULL)
		g_object_unref(camera->gc);
	if(camera->bold != NULL)
		pango_font_description_free(camera->bold);
	if(camera->fd >= 0)
		close(camera->fd);
	/* rgb_buffer may alias raw_buffer (no conversion needed); only
	 * free it when it is a separate allocation. */
	if((char *)camera->rgb_buffer != camera->raw_buffer)
		free(camera->rgb_buffer);
	/* Unmap only the buffers that were successfully mmap'ed. */
	for(i = 0; i < camera->buffers_cnt; i++)
		if(camera->buffers[i].start != MAP_FAILED)
			munmap(camera->buffers[i].start, camera->buffers[i].length);
	free(camera->buffers);
	free(camera->raw_buffer);
	string_delete(camera->device);
	object_delete(camera);
}
/**
 * Pause previewing.
 *
 * Pausing is implemented as a plain stop; the result code from
 * camera_stop() is propagated to the caller unchanged.
 */
static javacall_result camera_pause(javacall_handle handle)
{
    javacall_result result = camera_stop(handle);
    return result;
}
/* Shut the program down: stop video capture on the camera device,
 * then release it. Order matters — the device must be stopped before
 * it is closed. */
void endProgram(void)
{
	camera_stop(fdCamera);
	camera_close(fdCamera);
}
/* Main entry of the webcam service: registers the mandatory per-camera
 * renderers, brings up the webserver, loggers and sensors in a fixed
 * order, starts the camera(s), then parks in an infinite sleep loop.
 * The shutdown calls after the loop are unreachable (kept for
 * documentation of the intended teardown order).
 *
 * Note: the preprocessor directives below must each sit on their own
 * line (they were collapsed in the previous formatting, which is not
 * valid C). Always returns 0 — in practice it never returns. */
int webcam_run()
{
#ifdef HAVE_CAMERA
	// Our mandatory renderers, ensures they are run first being registered last
	struct Node *n = cameras.l_head;
	while (list_isNode(n)) {
		CAMERA camera = (CAMERA) n;
		n = n->n_succ;
		// Put annotated last
		list_addTail(&camera->renderers.renderers, &create_annotatedrenderer()->node);
		// These go first
		list_addHead(&camera->renderers.renderers, &create_thumbnailrenderer()->node);
		list_addHead(&camera->renderers.renderers, &create_rawrenderer()->node);
	}
#endif
	// Now start the system up
	webserver_initialise(config);
#ifdef HAVE_CAMERA
	// Initialise the renderers
	n = cameras.l_head;
	while (list_isNode(n)) {
		CAMERA camera = (CAMERA) n;
		n = n->n_succ;
		imagerenderer_init(camera);
	}
#endif
	// Initialise the loggers
	logger_start();
	// Finish off configuring the webserver, default port etc
	webserver_set_defaults();
#ifdef HAVE_CAMERA
	// Late (post-webserver) renderer initialisation
	n = cameras.l_head;
	while (list_isNode(n)) {
		CAMERA camera = (CAMERA) n;
		n = n->n_succ;
		imagerenderer_postinit(camera);
	}
#endif
	sensor_postinit();
	// The camera home page
	//create_homepage();
	// Start everything up
	webserver_start();
#ifdef HAVE_CAMERA
	camera_start();
#endif
	// Now the main loop, monitor for sensor updates
	//sensor_loop();
	while (1)
		sleep(60);
	// Shutdown - we never actually get here
	webserver_stop();
	logger_stop();
#ifdef HAVE_CAMERA
	camera_stop();
#endif
	return 0;
}