int main() { int i; struct picture_t pic; struct encoded_pic_t encoded_pic; errno = 0; if(!camera_init(&pic)) goto error_cam; if(!encoder_init(&pic)){ fprintf(stderr,"failed to initialize encoder\n"); goto error_encoder; } if(!preview_init(&pic)) goto error_preview; if(!output_init(&pic)) goto error_output; if(!encoder_encode_headers(&encoded_pic)) goto error_output; if(!output_write_headers(&encoded_pic)) goto error_output; if(!camera_on()) goto error_cam_on; if(signal(SIGINT, stop_recording) == SIG_ERR){ fprintf(stderr,"signal() failed\n"); goto error_signal; } printf("Press ctrl-c to stop recording...\n"); recording = 1; for(i=0; recording; i++){ if(!camera_get_frame(&pic)) break; gen_osd_info(); osd_print(&pic, osd_string); if((i&7)==0) // i%8==0 preview_display(&pic); if(!encoder_encode_frame(&pic, &encoded_pic)) break; applog_flush(); if(!output_write_frame(&encoded_pic)) break; } printf("\nrecorded %d frames\n", i); error_signal: camera_off(); error_cam_on: output_close(); error_output: preview_close(); error_preview: encoder_close(); error_encoder: camera_close(); error_cam: return 0; }
int main(int argc, char* argv[]) { char* device = argc > 1 ? argv[1] : "/dev/video0"; camera_t* camera = camera_open(device); if (!camera) { fprintf(stderr, "[%s] %s\n", device, strerror(errno)); return EXIT_FAILURE; } char name[5]; camera_format_t format; camera_config_get(camera, &format); camera_format_name(format.format, name); puts("[current config]"); printf("- [%s] w: %d, h: %d, fps: %d/%d\n", name, format.width, format.height, format.interval.denominator, format.interval.numerator); puts("[available formats]"); camera_formats_t* formats = camera_formats_new(camera); for (size_t i = 0; i < formats->length; i++) { camera_format_name(formats->head[i].format, name); printf("- [%s] w: %d, h: %d, fps: %d/%d\n", name, formats->head[i].width, formats->head[i].height, formats->head[i].interval.denominator, formats->head[i].interval.numerator); } camera_formats_delete(formats); camera_close(camera); return EXIT_SUCCESS; }
/*
 * Global teardown on application exit: stop/close audio, encoder, video
 * pipeline, camera, muxer and board peripherals, then rename the recording
 * file.  The order matters: consumers are stopped before their producers
 * are destroyed.
 *
 * NOTE(review): the `if (bHasAudio)` guard is commented out, so the audio
 * teardown block runs unconditionally — confirm this is intended.
 */
static void appExit()
{
    printf("##appExit\n");
    //if (bHasAudio)
    {
        audio_stop();
        audio_close();
        printf("audio_close\n");
    }
    close_encode();
    audio_dec_exit();
    video_process_stop();
    usleep(200 * 1000);  /* 200 ms: let in-flight frames drain before closing devices */
    camera_close();
    printf("camera_close\n");
    encode_close();
    printf("encode_close\n");
    mux_exit();
    printf("mux_close\n");
    encode_destroy();
    akuio_pmem_fini();
    setled_off();
    PTZControlDeinit();
    printf("akuio_pmem_fini\n");
    record_rename_file();
}
/*
 * Tear down the capture session: stop the video viewfinder if a movie was
 * being recorded, then close the camera handle if it is open.
 *
 * NOTE(review): _err holds a camera_error_t but is formatted with
 * strerror(); on QNX the camera error codes overlap errno values, so this
 * prints something sensible — confirm for other targets.
 */
void ScreenCapture::cleanup()
{
    sleep(2);  // grace period so pending camera callbacks finish — TODO confirm needed
    qDebug() << "all done, shutting down...\n";
    _err = CAMERA_EOK;
    if (_doMovie) {
        _err = camera_stop_video_viewfinder(_handle);
        if (_err != EOK) {
            qDebug() << "error stopping viewfinder: " << strerror(_err) << "\n";
        } else {
            qDebug() << "stopped viewfinder\n";
            _doMovie = false;  // viewfinder successfully stopped
        }
    }
    if (_cameraOpen) {
        camera_close(_handle);
        _cameraOpen = false;
        qDebug() << "closed camera: " << _handle << "\n";
    }
}
/*
 * Report whether the given capture mode is supported.  If the camera is
 * already loaded, query the live handle; otherwise open the configured
 * unit temporarily just to probe its capabilities.
 */
bool BbCameraSession::isCaptureModeSupported(QCamera::CaptureModes mode) const
{
    // Camera already loaded via QCamera::load(): ask the live handle.
    if (m_handle != CAMERA_HANDLE_INVALID)
        return isCaptureModeSupported(m_handle, mode);

    // Not loaded yet: map the device id to a unit and open it briefly.
    camera_unit_t unit;
    if (m_device == cameraIdentifierFront())
        unit = CAMERA_UNIT_FRONT;
    else if (m_device == cameraIdentifierDesktop())
        unit = CAMERA_UNIT_DESKTOP;
    else
        unit = CAMERA_UNIT_REAR;  // default; also matches cameraIdentifierRear()

    camera_handle_t probeHandle;
    if (camera_open(unit, CAMERA_MODE_RW, &probeHandle) != CAMERA_EOK)
        return true;  // cannot probe: optimistically report "supported"

    const bool supported = isCaptureModeSupported(probeHandle, mode);
    camera_close(probeHandle);
    return supported;
}
/*
 * Capture thread: continuously grab frames from the camera and copy the
 * Y/Cb/Cr planes into the overlay buffer and a working copy until
 * `stopflag` is set, then stop and close the camera.
 *
 * FIXES vs. original:
 *  - the function is a void* pthread entry point but had no return
 *    statement (undefined behavior if the joined result is ever read);
 *    it now returns NULL.
 *  - removed the unused locals `cnt` and `black`.
 */
void* videoFrame(void)
{
    while (1) {
        vidbuf = camera_get_frame(fdCamera);
        /* planar YCbCr: chroma planes are half the size of the luma plane */
        memcpy(vidbuf_overlay.ycbcr.y,  vidbuf->ycbcr.y,  len_vidbuf);
        memcpy(vidbuf_overlay.ycbcr.cb, vidbuf->ycbcr.cb, len_vidbuf / 2);
        memcpy(vidbuf_overlay.ycbcr.cr, vidbuf->ycbcr.cr, len_vidbuf / 2);
        memcpy(bufCopy.ycbcr.y,  vidbuf->ycbcr.y,  len_vidbuf);
        memcpy(bufCopy.ycbcr.cb, vidbuf->ycbcr.cb, len_vidbuf / 2);
        memcpy(bufCopy.ycbcr.cr, vidbuf->ycbcr.cr, len_vidbuf / 2);
        camera_release_frame(fdCamera, vidbuf);
        if (stopflag == 1)
            break;
    }
    camera_stop(fdCamera);
    camera_close(fdCamera);
    return NULL;
}
/*
 * Main loop: poll the gamepad buttons and display the requested OpenCV
 * image channel, or refresh the camera snapshot on Y.
 *
 * NOTE(review): `while (TRUE)` has no exit path, so camera_close() and
 * the final return are unreachable; the program presumably relies on
 * being terminated externally — confirm intended.
 */
int main()
{
    Button button;
    int status;

    start_buttons();
    status = camera_open();
    if (status == FALSE) {
        printf("Could not open the camera.\n");
        return 1;
    }
    while (TRUE) {
        button = button_pressed();
        switch (button) {
        case A_BUTTON: cv_show_image(RAW_IMAGE); break;
        case B_BUTTON: cv_show_image(GRAYSCALE); break;
        case C_BUTTON: cv_show_image(CANNY); break;
        case X_BUTTON: cv_show_image(HOUGHLINES); break;
        case Y_BUTTON: camera_update(); break;
        case Z_BUTTON: cv_show_image(RAW_IMAGE); break;  /* same as A — TODO confirm intended */
        }
    }
    camera_close();
    return 0;
}
// Destructor: release the camera handle the constructor acquired, logging
// (but otherwise ignoring) a failure to close.
FlashlightPrivate::~FlashlightPrivate()
{
    if (camHandle == CAMERA_HANDLE_INVALID)
        return;  // constructor never opened the camera

    const auto closeResult = camera_close(camHandle);
    if (closeResult != CAMERA_EOK)
        qDebug("Failed to close camera: %s.", errorStr(closeResult).c_str());
}
static void bb10camera_detect(MSWebCamManager *obj) { camera_error_t error; camera_handle_t handle; error = camera_open(CAMERA_UNIT_FRONT, CAMERA_MODE_RW, &handle); if (error == CAMERA_EOK) { if (camera_has_feature(handle, CAMERA_FEATURE_VIDEO)) { if (camera_can_feature(handle, CAMERA_FEATURE_VIDEO)) { MSWebCam *cam = ms_web_cam_new(&ms_bb10_camera_desc); cam->name = ms_strdup("BB10 Front Camera"); ms_message("[bb10_capture] camera added: %s", cam->name); ms_web_cam_manager_add_cam(obj, cam); camera_close(handle); } else { ms_warning("[bb10_capture] front camera has video feature but can't do it..."); } } else { ms_warning("[bb10_capture] front camera doesn't have video feature"); } } else { ms_warning("[bb10_capture] Can't open front camera: %s", error_to_string(error)); } error = camera_open(CAMERA_UNIT_REAR, CAMERA_MODE_RW, &handle); if (error == CAMERA_EOK) { if (camera_has_feature(handle, CAMERA_FEATURE_VIDEO)) { if (camera_can_feature(handle, CAMERA_FEATURE_VIDEO)) { MSWebCam *cam = ms_web_cam_new(&ms_bb10_camera_desc); cam->name = ms_strdup("BB10 Rear Camera"); ms_message("[bb10_capture] camera added: %s", cam->name); ms_web_cam_manager_add_cam(obj, cam); camera_close(handle); } else { ms_warning("[bb10_capture] rear camera has video feature but can't do it..."); } } else { ms_warning("[bb10_capture] rear camera doesn't have video feature"); } } else { ms_warning("[bb10_capture] Can't open rear camera: %s", error_to_string(error)); } }
/* Close the BB10 camera if it is open and reset the capture state;
 * warn and do nothing when no camera was opened. */
static void bb10capture_close_camera(BB10Capture *d) {
    if (d->camera_openned) {
        camera_close(d->cam_handle);
        d->cam_handle = 0;
        d->camera_openned = FALSE;
        ms_debug("[bb10_capture] camera closed");
    } else {
        ms_warning("[bb10_capture] camera not openned, skipping...");
    }
}
/*
 * Entry point: create a black background window for the viewfinder group,
 * initialize bps event delivery, open/configure the camera, then run the
 * application state machine until shutdown; finally tear everything down.
 */
int main(int argc, char **argv)
{
    const int usage = SCREEN_USAGE_NATIVE;
    screen_window_t screen_win;
    screen_buffer_t screen_buf = NULL;
    int rect[4] = { 0, 0, 0, 0 };

    // create an application window which will just act as a background
    screen_create_context(&screen_ctx, 0);
    screen_create_window(&screen_win, screen_ctx);
    screen_create_window_group(screen_win, vf_group);
    screen_set_window_property_iv(screen_win, SCREEN_PROPERTY_USAGE, &usage);
    screen_create_window_buffers(screen_win, 1);
    screen_get_window_property_pv(screen_win, SCREEN_PROPERTY_RENDER_BUFFERS, (void **)&screen_buf);
    // rect[2], rect[3] receive the buffer width/height
    screen_get_window_property_iv(screen_win, SCREEN_PROPERTY_BUFFER_SIZE, rect+2);

    // fill the window with black
    int attribs[] = { SCREEN_BLIT_COLOR, 0x00000000, SCREEN_BLIT_END };
    screen_fill(screen_ctx, screen_buf, attribs);
    screen_post_window(screen_win, screen_buf, 1, rect, 0);
    // position the window at an arbitrary z-order
    int i = APP_ZORDER;
    screen_set_window_property_iv(screen_win, SCREEN_PROPERTY_ZORDER, &i);

    // Signal bps library that navigator and screen events will be requested
    bps_initialize();
    screen_request_events(screen_ctx);
    navigator_request_events(0);

    // open camera and configure viewfinder
    if (init_camera() == EOK) {
        // our main loop just runs a state machine and handles input
        while (!shutdown) {
            run_state_machine();
            // Handle user input
            handle_event();
        }
        if (state == STATE_VIEWFINDER) {
            // clean up camera
            camera_stop_photo_viewfinder(handle);
            camera_close(handle);
        }
    }
    // Clean up
    screen_stop_events(screen_ctx);
    bps_shutdown();
    screen_destroy_window(screen_win);
    screen_destroy_context(screen_ctx);
    return 0;
}
/* Smoke test: open the camera, grab one frame, then close it again. */
int main(int argc, char* argv[])
{
    int opened = camera_open();
    if (opened) {
        camera_update();
        printf("Closing camera...\n");
        camera_close();
        return 0;
    }
    printf("Failed to open camera.\n");
    return 1;
}
/*
 * Capture V4L2 frames from /dev/video0 (640x480), H.264-encode them and
 * write the stream to test.h264.
 *
 * FIXES vs. original:
 *  - fopen() mode "wa+" is not a standard mode string (implementation-
 *    defined); replaced with "wb" (binary write, matching the intent).
 *  - on fopen() failure the function returned -1 while leaking `cam` and
 *    the encode buffer and leaving capture running; all acquired
 *    resources are now released via a goto ladder.
 *  - the encode-buffer malloc is checked.
 *
 * NOTE(review): the capture loop has no exit condition (the `break` is
 * commented out), so the normal-path cleanup below is unreachable —
 * confirm intended; it is kept for symmetry/future use.
 */
int main(int argc, char **argv)
{
    int rc = 0;
    struct camera *cam = malloc(sizeof *cam);
    if (!cam) {
        printf("malloc camera failure!\n");
        exit(1);
    }
    memset(cam, 0, sizeof *cam);
    cam->device_name = "/dev/video0";
    cam->buffers = NULL;
    cam->width = 640;
    cam->height = 480;
    cam->display_depth = 5; /* RGB24 */
    cam->h264_file_name = "test.h264";

    camera_open(cam);
    camera_init(cam);
    camera_capturing_start(cam);
    h264_compress_init(&cam->en, cam->width, cam->height);

    /* worst-case frame size: width * height * 3 bytes (RGB24) */
    cam->h264_buf = malloc(sizeof(uint8_t) * cam->width * cam->height * 3);
    if (!cam->h264_buf) {
        printf("malloc h264 buffer failure!\n");
        rc = -1;
        goto out_encoder;
    }

    /* FIX: "wa+" replaced with the standard binary-write mode */
    if ((cam->h264_fp = fopen(cam->h264_file_name, "wb")) == NULL) {
        printf("open file error!\n");
        rc = -1;
        goto out_buf;
    }

    while (1) {
        if (read_and_encode_frame(cam) < 0) {
            fprintf(stderr, "read_fram fail in thread\n");
            //break;
        }
    }

    printf("-----------end program------------");
    fclose(cam->h264_fp);
out_buf:
    free(cam->h264_buf);
out_encoder:
    h264_compress_uninit(&cam->en);
    camera_capturing_stop(cam);
    camera_uninit(cam);
    camera_close(cam);
    free(cam);
    return rc;
}
/*
 * SIGTERM handler: turn the viewfinder off (GTK builds only), close the
 * camera if a session was started, then exit the server cleanly.
 *
 * NOTE(review): debug()/camera teardown are not async-signal-safe; this
 * relies on the signal arriving at a quiet point — confirm acceptable.
 */
void ssigterm_handler(int signal, struct sigcontext sc)
{
    if ( started ) {
#ifdef HAVE_GTK
        if ( settings.viewfinder )
            viewfinder_off(&settings);
#endif
        debug("closing camera\n");
        camera_close(&settings);
    }
    server_exit_nicely();
}
/*
 * Shut the robot down: repeatedly send the stop command to the rear
 * control board, then stop and close the camera.
 */
void endProgram(void)
{
    int i, cntdown = 3;

    // stop: command 26 halts the back board (translated from Korean)
    command =26;
    for (i = 0; i < cntdown; i++) {
        usleep(COMMAND_DELAY); // delay between repeated sends
        write(fdBackBoard, &command, 1);
        printf("command: %d\n", command);
    }
    // TODO: add code to swerve aside (translated from Korean note)
    camera_stop(fdCamera);
    camera_close(fdCamera);
}
/*
 * Live camera preview: blit each frame and draw a red dot at the frame
 * centre until the user presses 'Q'.
 */
int main()
{
    camera_open();
    camera_update();
    graphics_open(get_camera_width(), get_camera_height());

    for (;;) {
        if (get_key_state('Q'))
            break;
        camera_update();
        graphics_blit_enc(get_camera_frame(), BGR, 0, 0,
                          get_camera_width(), get_camera_height());
        /* red marker: radius = 1/20 of frame height, centred */
        graphics_circle_fill(get_camera_width() / 2, get_camera_height() / 2,
                             get_camera_height() / 20, 255, 0, 0);
        graphics_update();
    }

    camera_close();
    graphics_close();
    return 0;
}
void* videoFrame(void){ int cnt = 0; while(1){ vidbuf = camera_get_frame(fdCamera); memcpy(vidbuf_overlay.ycbcr.y,vidbuf->ycbcr.y,len_vidbuf); memcpy(vidbuf_overlay.ycbcr.cb,vidbuf->ycbcr.cb,len_vidbuf/2); memcpy(vidbuf_overlay.ycbcr.cr,vidbuf->ycbcr.cr,len_vidbuf/2); memcpy(bufCopy.ycbcr.y,vidbuf->ycbcr.y,len_vidbuf); memcpy(bufCopy.ycbcr.cb,vidbuf->ycbcr.cb,len_vidbuf/2); memcpy(bufCopy.ycbcr.cr,vidbuf->ycbcr.cr,len_vidbuf/2); camera_release_frame(fdCamera,vidbuf); if(stopFlag == 1) break; /* if(obj.x_point >=0 && obj.x_point <320 && obj.y_point >=0 && obj.y_point < 240){ int i, j; for(i = obj.x_point; i<obj.x_point + 10; i++){ for(j = obj.y_point; j<obj.y_point + 10; j++){ int index = j*320 + i; vidbuf_overlay.ycbcr.y[index] = 120; vidbuf_overlay.ycbcr.cb[index/2] = 230; vidbuf_overlay.ycbcr.cr[index/2] = 30; } } } */ } camera_stop(fdCamera); camera_close(fdCamera); }
// Acquire the rear camera and verify it offers a video light.  On any
// failure camHandle is left as CAMERA_HANDLE_INVALID so the rest of the
// class knows the flashlight is unavailable.
FlashlightPrivate::FlashlightPrivate()
    : camHandle { CAMERA_HANDLE_INVALID }
    , lightOn { false }
{
    const auto openResult =
        camera_open(CAMERA_UNIT_REAR, CAMERA_MODE_PREAD | CAMERA_MODE_PWRITE, &camHandle);
    if (openResult != CAMERA_EOK) {
        qDebug("Flashlight error: failed to open camera: %s.", errorStr(openResult).c_str());
        return;
    }

    if (camera_can_feature(camHandle, CAMERA_FEATURE_VIDEOLIGHT))
        return;  // light available: keep the handle open for use

    // No video light: release the camera and mark the handle invalid.
    qDebug("Flashlight error: video light not available.");
    const auto closeResult = camera_close(camHandle);
    if (closeResult != CAMERA_EOK)
        qDebug("Flashlight error: failed to close camera: %s.", errorStr(closeResult).c_str());
    camHandle = CAMERA_HANDLE_INVALID;
}
/*
 * Camera-calibration program: connect to the Create base, drive a short
 * path so the cubes are in the camera's view, then run calibrateCamera().
 */
int main()
{
    printf("beginning camera calibration pgm\n");
    create_connect();
    enable_servos();
    camera_open(LOW_RES);
    printf("1.0 after initialization\n");
    //place create for camera recognition of cube positions
    forward(9);
    rightAngle(LEFT);
    forward(15);
    rightAngle(RIGHT);
    calibrateCamera();
    printf("the end...\n");
    camera_close();
    create_disconnect();
    return 0;
}
/* Emergency stop: cut all motor outputs, release the camera, disable servos. */
void stop(){
    ao();              // all motor outputs off
    camera_close();
    disable_servos();
}
void snap(float exposure, float gain, char *color, char *filename) { int m_sock; short unsigned int *image; int imgmean; int shutterflag; FILE *outfile; wait_counter = 0; // open hardware and file camera_init(); // use non-TDI config file setGain(gain); maestro_open(&m_sock); outfile = fopen(filename, "w"); // configure hardware maestro_setcolor(m_sock, color); //maestro_darkfield_off(m_sock); //setGain(gain);moved to line 174 // determine whether or not to use the shutter if(!strcmp(color, "none")) { shutterflag = 0; maestro_darkfield_on(m_sock); } else { shutterflag = 1; maestro_darkfield_off(m_sock); } // setup the software to receive an image from the camera setupSnap(); // snap the image maestro_snap(m_sock, (int)(exposure * 1000.0), shutterflag); // wait for image to be received by framegrabber while(!snapReceived()) { wait_counter++; usleep(1000); if(wait_counter > 20000) { attempt_counter++; network_iboot_off(&m_sock); sleep(1); network_iboot_on(&m_sock); sleep(1); wait_counter = 0; } } // get pointer to image image = getSnapImage(); // calculate mean for informational purposes, then write image to file imgmean = imagemean(image); fprintf(stdout, "Image mean: %d\n", imgmean); fwrite(image, sizeof(short unsigned int), 1000000, outfile); fprintf(stdout, "finish outputing image"); // close hardware and file if(!strcmp(color, "none")) maestro_darkfield_off(m_sock); fclose(outfile); fprintf(stdout, "closing camera"); camera_close(); } // end function
int servermode_socket(int servermode_timeout) { int socket_desc, c , read_size; struct sockaddr_in server , client; char client_message[2000]; char *arg; char buf[2100]; pslr_handle_t camhandle=NULL; pslr_status status; char C; float F = 0; pslr_rational_t shutter_speed = {0, 0}; uint32_t iso = 0; uint32_t auto_iso_min = 0; uint32_t auto_iso_max = 0; //Create socket socket_desc = socket(AF_INET , SOCK_STREAM , 0); if (socket_desc == -1) { fprintf(stderr, "Could not create socket"); } int enable = 1; if (setsockopt(socket_desc, SOL_SOCKET, SO_REUSEADDR, &enable, sizeof(int)) < 0) { fprintf(stderr, "setsockopt(SO_REUSEADDR) failed"); } DPRINT("Socket created\n"); //Prepare the sockaddr_in structure server.sin_family = AF_INET; server.sin_addr.s_addr = INADDR_ANY; server.sin_port = htons( 8888 ); //Bind if ( bind(socket_desc,(struct sockaddr *)&server , sizeof(server)) < 0) { fprintf(stderr, "bind failed. Error"); return 1; } DPRINT("bind done\n"); //Listen listen(socket_desc , 3); //Accept and incoming connection DPRINT("Waiting for incoming connections...\n"); c = sizeof(struct sockaddr_in); while ( true ) { fd_set rfds; struct timeval tv; int retval; FD_ZERO(&rfds); FD_SET(socket_desc, &rfds); tv.tv_sec = servermode_timeout; tv.tv_usec = 0; retval = select(socket_desc+1, &rfds, NULL, NULL, &tv); if (retval == -1) { DPRINT("select error\n"); exit(1); } else if (retval) { client_sock = accept(socket_desc, (struct sockaddr *)&client, (socklen_t*)&c); if (client_sock < 0) { fprintf(stderr, "accept failed"); return 1; } DPRINT("Connection accepted\n"); } else { DPRINT("Timeout\n"); close(socket_desc); exit(0); } //Receive a message from client while ( (read_size = recv(client_sock , client_message , 2000 , 0)) > 0 ) { client_message[read_size]='\0'; strip( client_message ); DPRINT(":%s:\n",client_message); if ( !strcmp(client_message, "stopserver" ) ) { if ( camhandle ) { camera_close(camhandle); } write_socket_answer("0\n"); exit(0); } else if ( !strcmp(client_message, 
"disconnect" ) ) { if ( camhandle ) { camera_close(camhandle); } write_socket_answer("0\n"); } else if ( (arg = is_string_prefix( client_message, "echo")) != NULL ) { sprintf( buf, "0 %.100s\n", arg); write_socket_answer(buf); } else if ( (arg = is_string_prefix( client_message, "usleep")) != NULL ) { int microseconds = atoi(arg); usleep(microseconds); write_socket_answer("0\n"); } else if ( !strcmp(client_message, "connect") ) { if ( camhandle ) { write_socket_answer("0\n"); } else if ( (camhandle = camera_connect( NULL, NULL, -1, buf )) ) { write_socket_answer("0\n"); } else { write_socket_answer(buf); } } else if ( !strcmp(client_message, "update_status") ) { if ( check_camera(camhandle) ) { if ( !pslr_get_status(camhandle, &status) ) { sprintf( buf, "%d\n", 0); } else { sprintf( buf, "%d\n", 1); } write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_camera_name") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %s\n", 0, pslr_camera_name(camhandle)); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_lens_name") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %s\n", 0, get_lens_name(status.lens_id1, status.lens_id2)); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_current_shutter_speed") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %d/%d\n", 0, status.current_shutter_speed.nom, status.current_shutter_speed.denom); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_current_aperture") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %s\n", 0, format_rational( status.current_aperture, "%.1f")); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_current_iso") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %d\n", 0, status.current_iso); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_bufmask") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %d\n", 0, status.bufmask); write_socket_answer(buf); } } else if ( 
!strcmp(client_message, "get_auto_bracket_mode") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %d\n", 0, status.auto_bracket_mode); write_socket_answer(buf); } } else if ( !strcmp(client_message, "get_auto_bracket_picture_count") ) { if ( check_camera(camhandle) ) { sprintf(buf, "%d %d\n", 0, status.auto_bracket_picture_count); write_socket_answer(buf); } } else if ( !strcmp(client_message, "focus") ) { if ( check_camera(camhandle) ) { pslr_focus(camhandle); sprintf(buf, "%d\n", 0); write_socket_answer(buf); } } else if ( !strcmp(client_message, "shutter") ) { if ( check_camera(camhandle) ) { pslr_shutter(camhandle); sprintf(buf, "%d\n", 0); write_socket_answer(buf); } } else if ( (arg = is_string_prefix( client_message, "delete_buffer")) != NULL ) { int bufno = atoi(arg); if ( check_camera(camhandle) ) { pslr_delete_buffer(camhandle,bufno); sprintf(buf, "%d\n", 0); write_socket_answer(buf); } } else if ( (arg = is_string_prefix( client_message, "get_preview_buffer")) != NULL ) { int bufno = atoi(arg); if ( check_camera(camhandle) ) { uint8_t *pImage; uint32_t imageSize; if ( pslr_get_buffer(camhandle, bufno, PSLR_BUF_PREVIEW, 4, &pImage, &imageSize) ) { sprintf(buf, "%d %d\n", 1, imageSize); write_socket_answer(buf); } else { sprintf(buf, "%d %d\n", 0, imageSize); write_socket_answer(buf); write_socket_answer_bin(pImage, imageSize); } } } else if ( (arg = is_string_prefix( client_message, "get_buffer")) != NULL ) { int bufno = atoi(arg); if ( check_camera(camhandle) ) { uint32_t imageSize; if ( pslr_buffer_open(camhandle, bufno, PSLR_BUF_DNG, 0) ) { sprintf(buf, "%d\n", 1); write_socket_answer(buf); } else { imageSize = pslr_buffer_get_size(camhandle); sprintf(buf, "%d %d\n", 0, imageSize); write_socket_answer(buf); uint32_t current = 0; while (1) { uint32_t bytes; uint8_t buf[65536]; bytes = pslr_buffer_read(camhandle, buf, sizeof (buf)); if (bytes == 0) { break; } write_socket_answer_bin( buf, bytes); current += bytes; } pslr_buffer_close(camhandle); } } 
} else if ( (arg = is_string_prefix( client_message, "set_shutter_speed")) != NULL ) { if ( check_camera(camhandle) ) { // TODO: merge with pktriggercord-cli shutter speed parse if (sscanf(arg, "1/%d%c", &shutter_speed.denom, &C) == 1) { shutter_speed.nom = 1; sprintf(buf, "%d %d %d\n", 0, shutter_speed.nom, shutter_speed.denom); } else if ((sscanf(arg, "%f%c", &F, &C)) == 1) { if (F < 2) { F = F * 10; shutter_speed.denom = 10; shutter_speed.nom = F; } else { shutter_speed.denom = 1; shutter_speed.nom = F; } sprintf(buf, "%d %d %d\n", 0, shutter_speed.nom, shutter_speed.denom); } else { shutter_speed.nom = 0; sprintf(buf,"1 Invalid shutter speed value.\n"); } if (shutter_speed.nom) { pslr_set_shutter(camhandle, shutter_speed); } write_socket_answer(buf); } } else if ( (arg = is_string_prefix( client_message, "set_iso")) != NULL ) { if ( check_camera(camhandle) ) { // TODO: merge with pktriggercord-cli shutter iso if (sscanf(arg, "%d-%d%c", &auto_iso_min, &auto_iso_max, &C) != 2) { auto_iso_min = 0; auto_iso_max = 0; iso = atoi(arg); } else { iso = 0; } if (iso==0 && auto_iso_min==0) { sprintf(buf,"1 Invalid iso value.\n"); } else { pslr_set_iso(camhandle, iso, auto_iso_min, auto_iso_max); sprintf(buf, "%d %d %d-%d\n", 0, iso, auto_iso_min, auto_iso_max); } write_socket_answer(buf); } } else { write_socket_answer("1 Invalid servermode command\n"); } } if (read_size == 0) { DPRINT("Client disconnected\n"); fflush(stdout); } else if (read_size == -1) { fprintf(stderr, "recv failed\n"); } } return 0; }
/* Emergency stop: cut all motor outputs, release the camera, disable servos. */
void stop(){
    ao();               // all motor outputs off
    camera_close();
    disable_servos();   // stop all parts
}
/*
 * Blob-tracking demo: select a color channel from argv[1] (0-3, default
 * 2 = red/"botguy"), then for ~4 seconds repeatedly snapshot the camera,
 * list up to NUM_TRACKING blobs on that channel and plot them on the
 * text grid.  Camera snapshots are rate-limited to one per 0.1 s.
 */
int main(int argc, char *argv[])
{
    int blob_count = 0, i = 0;
    int main_channel = 2; /* red - botguy */
    double time_of_snapshot = 0.0;
    double start = 0.0;
    printf("args: %d\n", argc);
    // channel override from the first command-line argument
    if (argc > 1) {
        if (argv[1][0] == '0') main_channel = 0;
        else if (argv[1][0] == '1') main_channel = 1;
        else if (argv[1][0] == '2') main_channel = 2;
        else if (argv[1][0] == '3') main_channel = 3;
    }
    for (i = 0; i < argc; i++) {
        printf("arg[%d] = %s\n", i, argv[i]);
    }
    printf("\n");
    int area[NUM_TRACKING];
    point2 center[NUM_TRACKING];
    rectangle blob[NUM_TRACKING];
    //int area1 = 0, areaX = 0;
    //point2 center;
    init_grid();
    //wait_for_light(0);
    camera_open();
    printf("Num channels: %d", get_channel_count());
    camera_update();
    time_of_snapshot = seconds();
    msleep(300);
    start = seconds();
    do {
        // Initialize
        memset(center, 0, sizeof(center));
        memset(area, 0, sizeof(area));
        clear_grid();
        // Begin
        // Do not do anything with the camera until enough time has passed
        if (seconds() - time_of_snapshot >= 0.1) {
            camera_update();
            time_of_snapshot = seconds();
            blob_count = get_object_count(main_channel);
            //sprintf(msg, "#:%2d", blob_count);
            diag();
            printf("#:%-2d ", blob_count);
            // clamp to the size of the tracking arrays
            if (blob_count > count_of(area)) {
                blob_count = count_of(area);
            }
            if (blob_count > 0) {
                for (i = 0; i < blob_count; ++i) {
                    area[i] = get_object_area(main_channel, i);
                    center[i] = get_object_center(main_channel, i);
                    blob[i] = get_object_bbox(main_channel, i);
                    plot(&blob[i], i + '0');
                }
                for (i = 0; i < blob_count; ++i) {
                    printf(" %3d", blob[i].width * blob[i].height);
                }
                // pad the remaining columns so output stays aligned
                for (; i < count_of(blob); ++i) {
                    printf(" ");
                }
                printf(" ");
                for (i = 0; i < blob_count; ++i) {
                    printf(" %2dx%2d", blob[i].width, blob[i].height);
                }
                //show_xy(center[0].x, center[0].y);
            }
            //printf(" |%3d %4d <-> %4d %3d| %s\n", delta_left, left, right, delta_right, message);
            printf("%s\n", message);
            message[0] = 0;
            diag();
            show_grid();
        }
        //printf("The time is: %f %f\n", seconds(), seconds() - start);
    } while (seconds() - start <= 4.0);
    camera_close();
    return 0;
}
int main() { int s32MainFd,temp; struct timespec ts = { 2, 0 }; //================================================= ringmalloc(640*480); errno = 0; if(!camera_init(&pic)) goto error_cam; if(!encoder_init(&pic)) goto error_encoder; if(!preview_init(&pic)) goto error_preview; get_filename(); printf("file:%s\n",mkv_filename); if(!output_init(&pic,mkv_filename)) goto error_output; if(!encoder_encode_headers(&encoded_pic)) goto error_output; memcpy(&header_pic,&encoded_pic,sizeof(encoded_pic)); header_pic.buffer=malloc(encoded_pic.length); memcpy(header_pic.buffer,encoded_pic.buffer,encoded_pic.length); if(!output_write_headers(&encoded_pic,&psp)) goto error_output; encoder_release(&encoded_pic); if(!camera_on()) goto error_cam_on; //================================================ printf("RTSP server START\n"); PrefsInit(); printf("listen for client connecting...\n"); signal(SIGINT, IntHandl); s32MainFd = tcp_listen(SERVER_RTSP_PORT_DEFAULT); /* 初始化schedule_list 队列,创建调度线程,参考 schedule.c */ if (ScheduleInit(&pic,&encoded_pic) == ERR_FATAL) { fprintf(stderr,"Fatal: Can't start scheduler %s, %i \nServer is aborting.\n", __FILE__, __LINE__); return 0; } /* 将所有可用的RTP端口号放入到port_pool[MAX_SESSION] 中 */ RTP_port_pool_init(RTP_DEFAULT_PORT); //循环等待 if((temp = pthread_create(&thread[0], NULL, cam_thread, NULL)) != 0) printf("cam_thread error!\n"); else printf("cam_thread ok\n"); pthread_mutex_init(&mut,NULL); while (!g_s32Quit) { nanosleep(&ts, NULL); /*查找收到的rtsp连接, * 对每一个连接产生所有的信息放入到结构体rtsp_list中 */ // trace_point(); EventLoop(s32MainFd); } ringfree(); printf("The Server quit!\n"); camera_off(); error_cam_on: output_close(); error_output: preview_close(); error_preview: encoder_close(); error_encoder: camera_close(); error_cam: return NULL; }
// NOTE, in this code I am purposely ignoring error return codes for the sake of clarity while
// walking through the code. Normally, you should check the return codes for errors, as this
// will greatly help to isolate bugs.
//
// Opens the first available camera, configures a burst-capable photo
// viewfinder (15 fps, burst divisor 3) and starts it.  Returns 0 on
// success or the camera_error_t code on failure; on failure the handle
// is closed and reset to CAMERA_HANDLE_INVALID.
static int init_camera()
{
    camera_error_t err;
    unsigned int num;
    unsigned int i;
    camera_unit_t cams[CAMERA_UNIT_NUM_UNITS];
    camera_unit_t unit;
    // here are 2 ways to determine which cameras are available on a given device...
#if 0
    // METHOD 1
    // inventory the available camera units
    // NOTE: to just find the number of available cameras: camera_get_cameras(0, &num, NULL);
    camera_get_supported_cameras(CAMERA_UNIT_NUM_UNITS, &num, cams);
    for (i=0; i<num; i++) {
        fprintf(stderr, "found camera unit %d\n", cams[i]);
    }
#else
    // METHOD 2
    // inventory cameras which support a given feature set - in this case PHOTO & BURST
    camera_feature_t features[] = { CAMERA_FEATURE_PHOTO, CAMERA_FEATURE_BURST };
    camera_unit_t next = CAMERA_UNIT_NONE;
    num = 0;
    // note that this is an iterating function call which returns only one "next" unit at a time
    while (camera_find_capable(features, sizeof(features)/sizeof(*features), next, &next) == CAMERA_EOK) {
        cams[num++] = next;
        fprintf(stderr, "camera unit %d supports the required features\n", next);
    }
#endif
    // open the first camera found
    // NOTE(review): cams[0] is read without checking num > 0 — if no unit
    // matched, this uses an indeterminate value.  TODO confirm at least one
    // camera always exists on supported devices.
    unit = cams[0];
    fprintf(stderr, "selecting camera unit %d\n", unit);
    err = camera_open(unit, CAMERA_MODE_RW | CAMERA_MODE_ROLL, &handle);
    if (err != CAMERA_EOK) {
        fprintf(stderr, "camera_open() failed: %d\n", err);
        return err;
    }
    // This is the minimal required configuration for a viewfinder.
    // NOTE: we need to enable burst mode when starting the viewfinder.
    // the maximum burst framerate is 15fps. don't trust me? you can query
    // camera_get_photo_vf_framerates() to determine the capabilities.
    err = camera_set_photovf_property(handle,
                                      CAMERA_IMGPROP_WIN_GROUPID, vf_group,
                                      CAMERA_IMGPROP_WIN_ID, "my_viewfinder",
                                      CAMERA_IMGPROP_BURSTMODE, 1,
                                      CAMERA_IMGPROP_FRAMERATE, (double)15.0); // max for burst is 15fps.
    if (err != CAMERA_EOK) {
        // NOTE: if you need to narrow down which setting is causing an error,
        // consider breaking the above command down into multiple calls.
        // be aware that some values must be changed together though (eg. height & width)
        // in order to pass range-checking
        fprintf(stderr, "camera_set_photovf_property() failed: %d\n", err);
    } else {
        // a valid photovf and photo configuration are required before starting
        // the viewfinder.
        // the defaults for both will always be sane, however if certain properties
        // are changed in one, they may need to be changed in the other.
        // resolution is one such example (aspect ratios must match).
        // here is an example configuration for photo properties - setting up the burst divisor.
        // this is only valid in burst viewfinder mode, and will cause the camera service to
        // only deliver every 3rd frame. (note that fractional rates are allowed).
        // why provide a divisor instead of just setting 5fps?
        camera_set_photo_property(handle,
                                  CAMERA_IMGPROP_BURSTDIVISOR, (double)3.0); // DOUBLE!
        // callbacks are optional, however status callback is useful for detecting asynchronous events
        // unless your application requires processing of viewfinder frame data, don't bother with
        // a viewfinder callback, as it incurs some additional ipc overhead. remember, the viewfinder
        // window itself is already rendered by the camera service, not your app.
        // NOTE: we are passing main_bps_chid as the void* arg which will then
        // be delivered to all callbacks. main_bps_chid is already a global variable,
        // so this isn't necessary, but is just done here to illustrate the convention.
        err = camera_start_photo_viewfinder(handle,
                                            NULL, //&viewfinder_callback,
                                            &status_callback,
                                            (void*)main_bps_chid); // user-defined arg.
        if (err != CAMERA_EOK) {
            fprintf(stderr, "camera_start_photo_viewfinder() failed: %d\n", err);
        } else {
            // successfully started viewfinder
            // if it's a front-facing camera, we should mirror the viewfinder once
            // we receive it.
            if (unit == CAMERA_UNIT_FRONT) {
                shouldmirror = true;
            }
            return 0;
        }
    }
    // clean up on error
    camera_close(handle);
    handle = CAMERA_HANDLE_INVALID;
    return err;
}
// NOTE, in this code I am purposely ignoring error return codes for the sake of clarity while
// walking through the code. Normally, you should check the return codes for errors, as this
// will greatly help to isolate bugs.
//
// Opens the first available camera, applies a minimal photo-viewfinder
// configuration (window group/id) plus a 180-degree photo rotation, and
// starts the viewfinder with frame and status callbacks.  Returns 0 on
// success or the camera_error_t code on failure; on failure the handle
// is closed and reset to CAMERA_HANDLE_INVALID.
static int init_camera()
{
    camera_error_t err;
    unsigned int num;
    unsigned int i;
    camera_unit_t cams[CAMERA_UNIT_NUM_UNITS];
    camera_unit_t unit;
    // here are 2 ways to determine which cameras are available on a given device...
#if 1
    // METHOD 1
    // inventory the available camera units
    // NOTE: to just find the number of available cameras: camera_get_cameras(0, &num, NULL);
    camera_get_supported_cameras(CAMERA_UNIT_NUM_UNITS, &num, cams);
    for (i=0; i<num; i++) {
        fprintf(stderr, "found camera unit %d\n", cams[i]);
    }
#else
    // METHOD 2
    // inventory cameras which support a given feature set - in this case PHOTO & VIDEO
    camera_feature_t features[] = { CAMERA_FEATURE_PHOTO, CAMERA_FEATURE_VIDEO };
    camera_unit_t next = CAMERA_UNIT_NONE;
    num = 0;
    // note that this is an iterating function call which returns only one "next" unit at a time
    while (camera_find_capable(features, sizeof(features)/sizeof(*features), next, &next) == CAMERA_EOK) {
        cams[num++] = next;
        fprintf(stderr, "camera unit %d supports the required features\n", next);
    }
#endif
    // open the first camera found
    // NOTE(review): cams[0] is read without checking num > 0 — if no unit
    // was reported, this uses an indeterminate value.  TODO confirm at
    // least one camera always exists on supported devices.
    unit = cams[0];
    fprintf(stderr, "selecting camera unit %d\n", unit);
    err = camera_open(unit, CAMERA_MODE_RW | CAMERA_MODE_ROLL, &handle);
    if (err != CAMERA_EOK) {
        fprintf(stderr, "camera_open() failed: %d\n", err);
        return err;
    }
    // This is the minimal required configuration for a viewfinder.
    err = camera_set_photovf_property(handle,
                                      CAMERA_IMGPROP_WIN_GROUPID, vf_group,
                                      CAMERA_IMGPROP_WIN_ID, "my_viewfinder");
#if 0
    // here is a more complex example configuration:
    err = camera_set_photovf_property(handle,
                                      CAMERA_IMGPROP_WIN_GROUPID, vfWndGroupId,
                                      CAMERA_IMGPROP_WIN_ID, vfWndWindowId,
                                      CAMERA_IMGPROP_WIDTH, w,
                                      CAMERA_IMGPROP_HEIGHT, h,
                                      CAMERA_IMGPROP_HWOVERLAY, 1,
#ifdef DO_PHOTO_180
                                      CAMERA_IMGPROP_ROTATION, (rotation+180) % 360,
#else
                                      CAMERA_IMGPROP_ROTATION, rotation,
#endif
#ifdef DO_BURST
                                      CAMERA_IMGPROP_BURSTMODE, 1,
#endif
                                      CAMERA_IMGPROP_FRAMERATE, 15.0);
#endif
    if (err != CAMERA_EOK) {
        // NOTE: if you need to narrow down which setting is causing an error,
        // consider breaking the above command down into multiple calls.
        // be aware that some values must be changed together though (eg. height & width)
        // in order to pass range-checking
        fprintf(stderr, "camera_set_photovf_property() failed: %d\n", err);
    } else {
        // a valid photovf and photo configuration are required before starting
        // the viewfinder.
        // the defaults for both will always be sane, however if certain properties
        // are changed in one, they may need to be changed in the other.
        // resolution is one such example (aspect ratios must match).
        // here is an example configuration for photo properties. (just updating rotation)
        camera_set_photo_property(handle,
                                  CAMERA_IMGPROP_ROTATION, 180);
        // callbacks are optional, however status callback is useful for detecting asynchronous events
        // unless your application requires processing of viewfinder frame data, don't bother with
        // a viewfinder callback, as it incurs some additional ipc overhead. remember, the viewfinder
        // window itself is already rendered by the camera service, not your app.
        err = camera_start_photo_viewfinder(handle,
                                            &viewfinder_callback,
                                            &status_callback,
                                            (void*)123); // arbitrary user argument
        if (err != CAMERA_EOK) {
            fprintf(stderr, "camera_start_photo_viewfinder() failed: %d\n", err);
        } else {
            // successfully started viewfinder
            // if it's a front-facing camera, we should mirror the viewfinder once
            // we receive it.
            if (unit == CAMERA_UNIT_FRONT) {
                shouldmirror = true;
            }
            return 0;
        }
    }
    // clean up on error
    camera_close(handle);
    handle = CAMERA_HANDLE_INVALID;
    return err;
}
/* Program teardown: stop the capture stream, then release the camera device. */
void endProgram(void)
{
    camera_stop(fdCamera);
    camera_close(fdCamera);
}