/* Botball tournament round: stage the sorter servos, wait for the starting
 * light, drive the sort-and-dump sequence, then shut everything down.
 * Timing/ordering is intentional — do not reorder calls. */
int main() {
    camera_open();
    ssp(SORT_SERVO,START);          /* park sorter at its start position */
    ssp(RED_SERVO,RED_START);
    ssp(GREEN_SERVO,GREEN_START);
    wait_for_light(0);              /* block until the starting light on port 0 */
    start();
    shut_down_in(119);              /* hard stop just under the 2-minute limit */
    enable_servos();
    msleep(5000);
    reset();
    /* drive to the sorting station; arguments presumably (distance, speed-ish)
     * per the project's motion helpers — TODO confirm units */
    right(47,0);
    forward(35);
    right(10,0);
    backward(46);
    full_sort();
    other_side();
    ssp(GREEN_SERVO,GREEN_DUMP);    /* dump the green bin */
    msleep(1000);
    ssp(GREEN_SERVO,GREEN_START);
    msleep(250);
    now();
    ao();                           /* all motors off */
    disable_servos();
}
/* Open the BB10 camera selected in d->camera and configure its viewfinder
 * for video capture: frame size, NV12 format, optional (fixed) framerate,
 * and rotation.  No-op if the camera is already open; on failure only an
 * error is logged and d->camera_openned stays FALSE. */
static void bb10capture_open_camera(BB10Capture *d) {
    camera_error_t error;
    if (d->camera_openned) {
        ms_warning("[bb10_capture] camera already openned, skipping...");
        return;
    }
    ms_message("[bb10_capture] openning %s camera", d->camera == CAMERA_UNIT_FRONT ? "front" : (d->camera == CAMERA_UNIT_REAR ? "rear" : "unknown"));
    error = camera_open(d->camera, CAMERA_MODE_RW, &(d->cam_handle));
    if (error == CAMERA_EOK) {
        camera_set_vf_mode(d->cam_handle, CAMERA_VFMODE_VIDEO);
        camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_WIDTH, d->vsize.width, CAMERA_IMGPROP_HEIGHT, d->vsize.height);
        camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_FORMAT, CAMERA_FRAMETYPE_NV12);
        ms_debug("[bb10_capture] camera capture vsize: %i,%i", d->vsize.width, d->vsize.height);
        if (d->framerate > 0) {
            /* enabling variable framerate with min == target pins an
             * effectively fixed capture rate */
            camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_VARIABLEFRAMERATE, 1);
            camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_MINFRAMERATE, (double)d->framerate, CAMERA_IMGPROP_FRAMERATE, (double)d->framerate);
        }
        int rotation = d->rotation;
        if (!d->is_front_cam) {
            /* rear sensor rotates the opposite way.
             * NOTE(review): d->rotation == 0 yields 360 here — confirm the
             * camera API treats 360 as 0 */
            rotation = 360 - d->rotation;
        }
        camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_ROTATION, rotation);
        ms_debug("[bb10_capture] camera capture rotation: %i", rotation);
        d->camera_openned = TRUE;
    } else {
        ms_error("[bb10_capture] openning %i camera failed: %s", d->camera, error_to_string(error));
    }
}
/* Report whether the requested capture mode is supported by this camera.
 * If the camera is not loaded yet, it is opened temporarily just to query
 * support and closed again afterwards. */
bool BbCameraSession::isCaptureModeSupported(QCamera::CaptureModes mode) const {
    if (m_handle == CAMERA_HANDLE_INVALID) {
        // the camera has not been loaded yet via QCamera::load(), so
        // we open it temporarily to peek for the supported capture modes
        camera_unit_t unit = CAMERA_UNIT_REAR;
        if (m_device == cameraIdentifierFront())
            unit = CAMERA_UNIT_FRONT;
        else if (m_device == cameraIdentifierRear())
            unit = CAMERA_UNIT_REAR;
        else if (m_device == cameraIdentifierDesktop())
            unit = CAMERA_UNIT_DESKTOP;
        camera_handle_t handle;
        const camera_error_t result = camera_open(unit, CAMERA_MODE_RW, &handle);
        if (result != CAMERA_EOK)
            return true; // optimistic: if probing fails, report "supported" rather than disable features
        const bool supported = isCaptureModeSupported(handle, mode);
        camera_close(handle);
        return supported;
    } else {
        return isCaptureModeSupported(m_handle, mode);
    }
}
struct tlv_packet *webcam_list(struct tlv_handler_ctx *ctx) { struct tlv_packet *p = tlv_packet_response_result(ctx, TLV_RESULT_SUCCESS); for (int i=0;i<10;i++) { int fd = camera_open(i); if (fd == -1) { continue; } struct v4l2_capability cap; int result = xioctl(fd, VIDIOC_QUERYCAP, &cap); if (result == -1) { if (errno == EINVAL) { break; } else { continue; } } if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { continue; } if (!(cap.capabilities & V4L2_CAP_STREAMING)) { continue; } p = tlv_packet_add_str(p, TLV_TYPE_WEBCAM_NAME, (const char*)cap.card); } return p; }
int main() { Button button; int status; start_buttons(); status = camera_open(); if (status == FALSE) { printf("Could not open the camera.\n"); return 1; } while (TRUE) { button = button_pressed(); switch (button) { case A_BUTTON: cv_show_image(RAW_IMAGE); break; case B_BUTTON: cv_show_image(GRAYSCALE); break; case C_BUTTON: cv_show_image(CANNY); break; case X_BUTTON: cv_show_image(HOUGHLINES); break; case Y_BUTTON: camera_update(); break; case Z_BUTTON: cv_show_image(RAW_IMAGE); break; } } camera_close(); return 0; }
int main(int argc, char* argv[]) { char* device = argc > 1 ? argv[1] : "/dev/video0"; camera_t* camera = camera_open(device); if (!camera) { fprintf(stderr, "[%s] %s\n", device, strerror(errno)); return EXIT_FAILURE; } char name[5]; camera_format_t format; camera_config_get(camera, &format); camera_format_name(format.format, name); puts("[current config]"); printf("- [%s] w: %d, h: %d, fps: %d/%d\n", name, format.width, format.height, format.interval.denominator, format.interval.numerator); puts("[available formats]"); camera_formats_t* formats = camera_formats_new(camera); for (size_t i = 0; i < formats->length; i++) { camera_format_name(formats->head[i].format, name); printf("- [%s] w: %d, h: %d, fps: %d/%d\n", name, formats->head[i].width, formats->head[i].height, formats->head[i].interval.denominator, formats->head[i].interval.numerator); } camera_formats_delete(formats); camera_close(camera); return EXIT_SUCCESS; }
/* Steer toward the largest blob on channel 0: turn left/right when the blob
 * centre drifts out of the 45..115 pixel band, drive straight otherwise.
 * Stops when the side button is pressed.
 *
 * Fix: the original called get_object_center(0,0) up to four times per
 * iteration; the value cannot change between camera_update() calls, so it
 * is read once.  The three conditions were exhaustive and mutually
 * exclusive, so they are an else-if chain now. */
int main() {
    int lspeed = 10;   /* slow wheel during a turn */
    int hspeed = 80;   /* fast wheel / straight-ahead speed */
    camera_open(LOW_RES);
    while (side_button() == 0) {
        camera_update();
        int x = get_object_center(0, 0).x;  /* blob centre, read once per frame */
        if (x < 45) {
            motor(1, lspeed);
            motor(3, hspeed);
            printf("LEFT\n");
        } else if (x > 115) {
            motor(1, hspeed);
            motor(3, lspeed);
            printf("RIGHT\n");
        } else {
            motor(1, hspeed);
            motor(3, hspeed);
            printf("CENTER\n");
        }
    }
    return 0;
}
// prepare: void prepare(){ camera_open(LOW_RES); camera_load_config(CONFIG); camera_update(); camera_update(); camera_update(); // prepare camera enable_servos(); set_servo_position(BSV, BSV_LEVEL); set_servo_position(ASV, ASV_BACK); set_servo_position(RSV, RSV_DOWN); set_servo_position(SSV, SSV_BACK); // prepare servos set_analog_pullup(FSS, 0); set_analog_pullup(BSS, 0); // prepare sensors display_clear(); printf("prepare succeeds"); while(digital(TSS) == 0); msleep(1500); //wait_for_light(SSS); //shut_down_in(TIME_LIMIT); // show controller that coke is ready }
/* Camera smoke test: open the camera, wait for the operator to press A,
 * then print blob count and blob-data length for channel 1. */
int main() {
    printf("Hello, World!\n");
    camera_open(LOW_RES);
    press_A_to_continue();
    printf("Number of objects on channel 1: %i\n", get_object_count(1));
    printf("Object data length on channel 1: %i\n", get_object_data_length(1, 0));
    return 0;
}
static void bb10camera_detect(MSWebCamManager *obj) { camera_error_t error; camera_handle_t handle; error = camera_open(CAMERA_UNIT_FRONT, CAMERA_MODE_RW, &handle); if (error == CAMERA_EOK) { if (camera_has_feature(handle, CAMERA_FEATURE_VIDEO)) { if (camera_can_feature(handle, CAMERA_FEATURE_VIDEO)) { MSWebCam *cam = ms_web_cam_new(&ms_bb10_camera_desc); cam->name = ms_strdup("BB10 Front Camera"); ms_message("[bb10_capture] camera added: %s", cam->name); ms_web_cam_manager_add_cam(obj, cam); camera_close(handle); } else { ms_warning("[bb10_capture] front camera has video feature but can't do it..."); } } else { ms_warning("[bb10_capture] front camera doesn't have video feature"); } } else { ms_warning("[bb10_capture] Can't open front camera: %s", error_to_string(error)); } error = camera_open(CAMERA_UNIT_REAR, CAMERA_MODE_RW, &handle); if (error == CAMERA_EOK) { if (camera_has_feature(handle, CAMERA_FEATURE_VIDEO)) { if (camera_can_feature(handle, CAMERA_FEATURE_VIDEO)) { MSWebCam *cam = ms_web_cam_new(&ms_bb10_camera_desc); cam->name = ms_strdup("BB10 Rear Camera"); ms_message("[bb10_capture] camera added: %s", cam->name); ms_web_cam_manager_add_cam(obj, cam); camera_close(handle); } else { ms_warning("[bb10_capture] rear camera has video feature but can't do it..."); } } else { ms_warning("[bb10_capture] rear camera doesn't have video feature"); } } else { ms_warning("[bb10_capture] Can't open rear camera: %s", error_to_string(error)); } }
/* GTK camera-test entry point: install the SIGPIPE handler, pick the
 * network host and video device from argv (with defaults), then hand
 * control to the GTK main loop. */
int main(int argc, char **argv) {
    signal(SIGPIPE, handle_sig);
    g_set_application_name("Camera Test - By Feesh! (A lot of code borrowed from Sonicsnap-gui-1.7)");
    gtk_init(&argc, &argv);

    char *host = (argc > 1) ? argv[1] : "localhost";
    char *device = (argc > 2) ? argv[2] : "/dev/video0";
    network_init(host);
    camera_open(device, WIDTH, HEIGHT);

    gui_init();
    gtk_main();
    return 0;
}
// check the ball:
/* Grab one fresh frame and test the largest blob on the given colour
 * channel; returns 0 when its area exceeds 3000 px, 1 otherwise. */
int checkBalls(int color){
    camera_open();
    camera_update();
    return (get_object_area(color, 0) > 3000) ? 0 : 1;
}
/* Poll the camera and depth sensor until the A button is clicked, clearing
 * and re-printing the status message each pass. */
int main () {
    camera_open();
    depth_open();
    while(!(a_button_clicked())){
        camera_update();
        depth_update();
        display_clear();
        printf("prepare succeeds");
    }
    return 0;
}
/* Camera smoke test: grab one frame and print the area of the largest blob
 * on channel 0.
 *
 * Fixes: removed the unused `CvMat* x;` and `Mat y;` locals, and return 0
 * instead of 1 — the original reported failure to the shell on the normal
 * path. */
int main() {
    printf("hello\n");
    camera_open(LOW_RES);
    camera_update();
    printf("Area: %i\n", get_object_area(0, 0));
    return 0;
}
/* Minimal open/update/close camera round-trip; exits 1 if the camera
 * cannot be opened. */
int main(int argc, char* argv[]) {
    if (camera_open()) {
        camera_update();
        printf("Closing camera...\n");
        camera_close();
        return 0;
    }
    printf("Failed to open camera.\n");
    return 1;
}
/* Capture frames from /dev/video0 at 640x480, H.264-encode them, and write
 * the stream to test.h264.  Runs until killed; the cleanup below the
 * while(1) loop is unreachable in normal operation. */
int main(int argc, char **argv) {
    struct camera *cam = NULL;
    cam = (struct camera *)malloc(sizeof(struct camera));
    if (!cam) {
        printf("malloc camera failure!\n");
        exit(1);
    }
    memset(cam, 0, sizeof(struct camera));
    cam->device_name = "/dev/video0";
    cam->buffers = NULL;
    cam->width = 640;
    cam->height = 480;
    cam->display_depth = 5; /* RGB24 */
    cam->h264_file_name = "test.h264";

    camera_open(cam);
    camera_init(cam);
    camera_capturing_start(cam);
    h264_compress_init(&cam->en, cam->width, cam->height);
    /* encoder output buffer, sized for one worst-case RGB frame.
     * NOTE(review): allocation result is not checked — confirm acceptable */
    cam->h264_buf = (uint8_t *) malloc(sizeof(uint8_t) * cam->width * cam->height * 3);
    /* NOTE(review): "wa+" is a nonstandard fopen mode string — verify the
     * intent (probably "w+" or "a+") */
    if ((cam->h264_fp = fopen(cam->h264_file_name, "wa+")) == NULL) {
        printf("open file error!\n");
        return -1;
    }
    while (1) {
        if (read_and_encode_frame(cam) < 0) {
            fprintf(stderr, "read_fram fail in thread\n");
            //break;
        }
    }
    /* unreachable while the loop above never breaks */
    printf("-----------end program------------");
    if (cam->h264_fp != NULL)
        fclose(cam->h264_fp);
    h264_compress_uninit(&cam->en);
    free(cam->h264_buf);
    camera_capturing_stop(cam);
    camera_uninit(cam);
    camera_close(cam);
    free(cam);
    return 0;
}
/* Stage the robot: load the camera config and grab one frame, park every
 * servo, then block until the A button is clicked. */
void prepare() {
    camera_open(LOW_RES);
    camera_load_config(CONFIG);
    camera_update();
    enable_servos();
    set_servo_position(BSV, BSV_LEVEL);
    set_servo_position(ASV, ASV_BACK);
    set_servo_position(RSV, RSV_DOWN);
    set_servo_position(SSV, SSV_BACK);
    display_clear();
    printf("prepare succeeds");
    while(!(a_button_clicked()));   /* busy-wait for operator go-ahead */
}
int main() { //while the a button isn't pressed int x, y, color=0; //sets all variables to 0 camera_open(LOW_RES); while(side_button()==0) { //sets camera to lowest resolution for minimal lag //camera takes a picture if(get_object_count(color) > 0) // If the right color is detected, run this code { x = get_object_center(color,0).x; y = get_object_center(color,0).y; //x and y variables now represent x and y coordinates of the biggest color blob if(x>80) //if the x coordinate is on the right { printf("The biggest blob is on the right and the coordinates are (%d,%d)", x ,y); //prints the words } if(x<80) //if x is on th left { printf("The biggest blob is on the left and the coordinates are (%d,%d)" , x ,y); //print the words } else { printf("no object in sight"); } camera_update(); } } }
/* Open the capture unit in read/write + roll mode if it is not open yet.
 * On success the handle is cached in _handle and _cameraOpen is set; on
 * failure only the error is logged. */
void ScreenCapture::openCamera() {
    _mode = CAMERA_MODE_RW | CAMERA_MODE_ROLL;
    if (!_cameraOpen) {
        _err = camera_open(_unit, _mode, &_handle);
        if (_err != EOK) {
            // NOTE(review): strerror() expects an errno value — confirm
            // camera_error_t codes actually map onto errno here
            qDebug() << "error " << _err << " opening camera: " << strerror(_err) << "\n";
        } else {
            qDebug() << "camera opened ... \n";
            _cameraOpen = true;
        }
    }
}
/* Round setup: warm the camera up by discarding ten frames, move servo 1
 * to the requested position, then wait for the starting light and arm the
 * shutdown timer. */
void start(int servo_position) {
    camera_open();
    /* flush stale frames so later reads see live data */
    int frame;
    for (frame = 0; frame < 10; frame++) {
        camera_update();
        msleep(200);
    }
    set_servo_position(1, servo_position);  /* takes effect when servos enable */
    enable_servos();
    msleep(1000);
    wait_for_light(7);
    shut_down_in(115);
}
// Initialize Camera and Sensors void initDevices(void) { struct pxacam_setting camset; // Backboard uart init fdBackBoard = openSerial(); // 3-axis sensor init fdThreeAxis = open("/dev/MMA_ADC", O_RDONLY); ASSERT(fdThreeAxis); ioctl(fdThreeAxis,MMA_VIN_ON, 0); ioctl(fdThreeAxis,MMA_SLEEP_MODE_ON, 0); ioctl(fdThreeAxis,MMA_SENS_60G, 0); // infrared sensor init fdInfra = open("/dev/FOUR_ADC", O_NOCTTY); ASSERT(fdInfra); // Camera init fdCamera = camera_open(NULL,0); ASSERT(fdCamera); memset(&camset,0,sizeof(camset)); camset.mode = CAM_MODE_VIDEO; camset.format = pxavid_ycbcr422; camset.width = 320; camset.height = 240; camera_config(fdCamera,&camset); camera_start(fdCamera); fdOverlay2 = overlay2_open(NULL,pxavid_ycbcr422,NULL, 320, 240, 0 , 0); overlay2_getbuf(fdOverlay2, &vidbuf_overlay); len_vidbuf = vidbuf_overlay.width * vidbuf_overlay.height; cImg.width=camset.width*2; cImg.height=camset.height; // init finish printf("Initializing Device Finished\n"); }
/* TLV handler: open the webcam selected by the 1-based interface id in the
 * request and start streaming; returns a success/failure result packet. */
struct tlv_packet *webcam_start(struct tlv_handler_ctx *ctx) {
    uint32_t deviceIndex = 0;
    uint32_t quality = 0;   // read from the request but not used below
    tlv_packet_get_u32(ctx->req, TLV_TYPE_WEBCAM_INTERFACE_ID, &deviceIndex);
    tlv_packet_get_u32(ctx->req, TLV_TYPE_WEBCAM_QUALITY, &quality);
    /* NOTE(review): if the interface-id TLV is absent deviceIndex stays 0,
     * so deviceIndex - 1 wraps to UINT32_MAX — verify camera_open rejects
     * that index */
    int result = camera_open(deviceIndex - 1);
    if (result == -1) {
        return tlv_packet_response_result(ctx, TLV_RESULT_FAILURE);
    }
    result = camera_start();
    if (result == -1) {
        return tlv_packet_response_result(ctx, TLV_RESULT_FAILURE);
    }
    return tlv_packet_response_result(ctx, TLV_RESULT_SUCCESS);
}
int main(){ camera_open(LOW_RES); camera_load_config("color.config"); while(!(a_button_clicked())){ display_clear(); camera_update(); int objNum = get_object_count(GREEN); if(objNum == 0) printf("NO SUCH OBJECT!!!!!!!!"); else if(objNum > 0){ point2 objCen = get_object_center(GREEN, 0); int objArea = get_object_area(GREEN, 0); printf("X:\t%d\nY:\t%d\n", objCen.x, objCen.y); printf("Area:\t%d\n", objArea); } msleep(100); } return 0; }
/* Live camera viewer: blit each frame to a graphics window with a red dot
 * at the centre, until the Q key is pressed. */
int main() {
    camera_open();
    camera_update();    /* grab one frame so width/height are valid */
    graphics_open(get_camera_width(), get_camera_height());
    while(!get_key_state('Q')) {
        camera_update();
        /* frame is BGR-encoded; draw it at the window origin */
        graphics_blit_enc(get_camera_frame(), BGR, 0, 0, get_camera_width(), get_camera_height());
        /* centre marker: radius = 1/20th of the frame height, pure red */
        graphics_circle_fill(get_camera_width() / 2, get_camera_height() / 2, get_camera_height() / 20, 255, 0, 0);
        graphics_update();
    }
    camera_close();
    graphics_close();
    return 0;
}
/* Acquire the rear camera for use as a flashlight.  On success the handle
 * deliberately stays open for later light toggling; if the unit has no
 * video light the camera is closed again and camHandle reset to invalid. */
FlashlightPrivate::FlashlightPrivate()
    : camHandle { CAMERA_HANDLE_INVALID }
    , lightOn { false }
{
    auto error = camera_open(CAMERA_UNIT_REAR, CAMERA_MODE_PREAD | CAMERA_MODE_PWRITE, &camHandle);
    if (error == CAMERA_EOK) {
        auto hasLight = camera_can_feature(camHandle, CAMERA_FEATURE_VIDEOLIGHT);
        if (!hasLight) {
            qDebug("Flashlight error: video light not available.");
            error = camera_close(camHandle);
            if (error != CAMERA_EOK) {
                qDebug("Flashlight error: failed to close camera: %s.", errorStr(error).c_str());
            }
            camHandle = CAMERA_HANDLE_INVALID;   // handle is dead either way
        }
    } else {
        qDebug("Flashlight error: failed to open camera: %s.", errorStr(error).c_str());
    }
}
int main(){ armUp(); clawOpen(); camera_open(LOW_RES); double start_wait=seconds(); while((seconds()-start_wait)<=25){//wait for poms or for 25 seconds int i=0; while(i<10){//picks latest image from the buffer camera_update(); i++; } if(get_object_count(chan)>0){ break; }else{ stop(0.1); } } while((seconds()-start_wait)<=60){ int area=get_object_area(chan, 0); if(area>=600){ int i=0; while(i<10){ camera_update(); i++; } int x=get_object_center(chan, 0).x; if(x<65){ rightF(0.1, 100, 80); }else{ leftF(0.1, 100, 80); } }else{ break; } } armDown(); clawClose(); armUp(); }
// Initialize Camera and Sensors void initDevices(void) { struct pxacam_setting camset; printf("-----Initializing Device Started-----\n"); // Backboard uart init fdBackBoard = openSerial(); printf("Initializing BackBoard complete!\n"); // 적외선 센서 init fdInfra = open("/dev/FOUR_ADC", O_NOCTTY); // Camera init fdCamera = camera_open(NULL,0); ASSERT(fdCamera); system("echo b > /proc/invert/tb"); //LCD DriverIC top-bottom invert ctrl memset(&camset,0,sizeof(camset)); camset.mode = CAM_MODE_VIDEO; camset.format = pxavid_ycbcr422; camset.width = MAX_X; camset.height = MAX_Y; camera_config(fdCamera,&camset); camera_start(fdCamera); fdOverlay2 = overlay2_open(NULL,pxavid_ycbcr422,NULL, MAX_X, MAX_Y, 0 , 0); overlay2_getbuf(fdOverlay2, &vidbuf_overlay); len_vidbuf = vidbuf_overlay.width * vidbuf_overlay.height; printf("Initializing Camera complete!\n"); // init finish printf("-----Initializing Device Finished-----\n"); }
/* Line-follow until the camera sees something on the `green` channel, then
 * grab the pom when its bounding box is large enough.  Loops until the
 * B button is pressed. */
void main() {
    calibrate();
    set_servo_position(CAM_SERVO,CAM_SERVO_POSITION_RIGHT);
    camera_open(LOW_RES);
    enable_servos();
    set_analog_pullup(3,0);
    while(!b_button()){
        claw_at_linefollowing();
        do {
            camera_update();
            linefolowing();
        } while(get_object_count(green) ==0); // line-follow until it sees something
        /* NOTE(review): the count is checked on channel `green` but the
         * bbox below reads channel 0 — confirm green == 0 */
        if(get_object_bbox(0,0).width>BLOB_SIZE_TAKE&&get_object_bbox(0,0).height>BLOB_SIZE_TAKE){
            takepom();
        }
        printf("kompletdurch \n");
    }
    disable_servos();
    stop();
}
//start 48500 //down 58000 void main() { set_servo_position(Servo_Back,Servo_Back_Up); set_servo_position(Servo_Left,Servo_Left_Closed); set_servo_position(Servo_Right,Servo_Right_Closed); enable_servos(); calibrate(); //wait for light //printf("wait for light oida"); //set_b_button_text("I am the Twilight"); //while(!b_button()){} wait_for_light(Sensor_Light); //shutdown stuff shut_down_in(115); start=seconds(); //start position for create start_position(); //we see everything camera_open(); //drive in front of the cubes take_position(); //watch for cubes cube_is_near(); ao(); //bring the first cube to the pipes bringback(); //take the 2nd cube and bring them to the pipes bringback2cube(); }
/* Camera calibration program: connect to the Create base, drive to the
 * calibration spot (forward, left turn, forward, right turn), run the
 * calibration routine, then disconnect cleanly. */
int main() {
    printf("beginning camera calibration pgm\n");
    create_connect();
    enable_servos();
    camera_open(LOW_RES);
    printf("1.0 after initialization\n");
    //place create for camera recognition of cube positions
    forward(9);
    rightAngle(LEFT);
    forward(15);
    rightAngle(RIGHT);
    calibrateCamera();
    printf("the end...\n");
    camera_close();
    create_disconnect();
    return 0;
}