static int initCamera(moveContext *context)
{
    int ret;

    context->camInfo.container = NULL;

    ret = cameraInit();
    printf("cameraInit() returned %d\n", ret);
    if (ret == 0) {
        cameraType type = CAM_TYPE_UNKNOWN;

        ret = cameraGetType(0, &type);
        if (ret == 0 && type == CAM_TYPE_PLAYSTATION_EYE) {
            context->camInfo.format = CAM_FORM_RAW8;
            context->camInfo.framerate = 60;
            context->camInfo.resolution = CAM_RESO_VGA;
            context->camInfo.info_ver = 0x0101;

            ret = sysMemContainerCreate(&context->camInfo.container, 0x200000);
            printf("sysMemContainerCreate() for camera container returned %d\n", ret);

            ret = cameraOpenEx(0, &context->camInfo);
            switch (ret) {
            case 0:
                printf("Found me an eye, arrr!\n");
                printf("cameraOpenEx returned %08X\n", ret);
                printf("Video dimensions: %dx%d\n", context->camInfo.width, context->camInfo.height);
                printf("Buffer at %08X\n", context->camInfo.buffer);
                printf("pbuf0 Buffer at %08X\n", context->camInfo.pbuf[0]);
                printf("pbuf1 Buffer at %08X\n", context->camInfo.pbuf[1]);
                printf("context->camInfo.info_ver %X\n", context->camInfo.info_ver);

                context->camRead.buffer = context->camInfo.buffer;
                context->camRead.version = 0x0100;
                printf("Setting CameraReadEx %08X buffer to cameraInfoEx buffer\n", context->camRead.buffer);
                break;
            default:
                printf("Error %X detected opening PlayStation Eye\n", ret);
                goto error;
            }
        } else {
            printf("Device detected is not a PlayStation Eye and this sample needs it\n");
            goto error;
        }
    } else {
        goto error;
    }
    return ret;

error:
    if (context->camInfo.container)
        sysMemContainerDestroy(context->camInfo.container);
    return ret;
}
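/*
 * A minimal teardown sketch to mirror initCamera(). It assumes the
 * PSL1GHT-style cameraClose()/cameraEnd() shutdown calls that pair with the
 * cameraInit()/cameraOpenEx() calls above; verify the names against the
 * camera headers actually used by this sample.
 */
static void exitCamera(moveContext *context)
{
    /* Close the device opened by cameraOpenEx() and shut the camera
       library down (assumed API, see note above). */
    cameraClose(0);
    cameraEnd();

    /* Release the memory container created in initCamera() */
    if (context->camInfo.container) {
        sysMemContainerDestroy(context->camInfo.container);
        context->camInfo.container = NULL;
    }
}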
int init(void *handle)
{
    MAIN *m = handle;

    m->var.state = STATE_DUMMY;
    m->var.newstate = STATE_INVENTORY;
    /* TODO: Must be fixed once the menu system and the rest of it is implemented */
    // m->var.newstate = STATE_OVERWORLD;

    /* Each *Init() returns non-zero on failure; the empty bodies make the
       chain fall through to the final else only when every step succeeded. */
    if (cameraInit(m) != 0);
    else if (systemInit(m) != 0);
    else if (mapInit(m) != 0);
    else if (textboxInit(m) != 0);
    else if (npcInit(m) != 0);
    else return 0;

    return -1;
}
//--------------------------------------------------------------
void testApp::setup(){
    int boxSize = 1000;
    int spacing = boxSize / 10;

    // sphere = TriangulatedSphere();
    outerGrid = OuterGrid(boxSize, spacing);
    cylinder = TriangulatedCylinder(1500, 2);
    cylinder2 = TriangulatedCylinder(2000, 3);

    /* Black background */
    ofBackground(0, 0, 0);
    ofSetVerticalSync(true);

    openGlInit();
    cameraInit();
    // useCamera = false;
}
int HyVisionHvr2130Camera::Enable(void)
{
    if( !cameraInit(_cameraWidth, _cameraHeight) ) {
        PrintMessage("ERROR : HyVisionHvr2130Camera::Enable() -> Can't init camera");
        return API_ERROR;
    }

    if( !_cameraBuf ) {
        _cameraBuf = new unsigned char[_cameraWidth*_cameraHeight*2];
    }
    if( !_cameraBufBMP ) {
        _cameraBufBMP = new unsigned char[_cameraWidth*_cameraHeight*3];
    }

    PrintMessage("SUCCESS : HyVisionHvr2130Camera::Enable()\n");
    return API_SUCCESS;
}
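/*
 * Sketch of a matching Disable() path, assuming the buffers allocated in
 * Enable() are owned by this class. cameraClose() below is a hypothetical
 * counterpart to cameraInit() and may be named differently in the actual
 * HVR-2130 driver.
 */
int HyVisionHvr2130Camera::Disable(void)
{
    cameraClose();             /* hypothetical shutdown call, see note above */

    delete [] _cameraBuf;      /* release the raw capture buffer */
    _cameraBuf = NULL;
    delete [] _cameraBufBMP;   /* release the RGB/BMP conversion buffer */
    _cameraBufBMP = NULL;

    PrintMessage("SUCCESS : HyVisionHvr2130Camera::Disable()\n");
    return API_SUCCESS;
}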
/*-----------------------------------------------------------------------------
    Name        : svStartup
    Description : Start the ship view system: set up the camera and register
                  the front-end render callbacks.
    Inputs      :
    Outputs     :
    Return      :
----------------------------------------------------------------------------*/
void svStartup(void)
{
    cameraInit(&svCamera, SV_InitialCameraDistance);
    svCamera.angle = DEG_TO_RAD(svAngle);
    svCamera.declination = DEG_TO_RAD(svDeclination);

    feDrawCallbackAdd(SV_ShipViewName, svShipViewRender);      //add render callbacks
    feDrawCallbackAdd(SV_FirepowerName, svFirepowerRender);
    feDrawCallbackAdd(SV_CoverageName, svCoverageRender);
    feDrawCallbackAdd(SV_ManeuverName, svManeuverRender);
    feDrawCallbackAdd(SV_ArmorName, svArmorRender);
    feDrawCallbackAdd(SV_TopSpeedName, svTopSpeedRender);
    feDrawCallbackAdd(SV_MassName, svMassRender);

    svMouseInside = FALSE;
    svMousePressLeft = FALSE;
    svMousePressRight = FALSE;

    svShipViewFont = frFontRegister(svShipViewFontName);
    svShipStatFont = frFontRegister(svShipStatFontName);
}
int main(void)
{
    //Variable Declarations
    initSolenoid();                       /* initialize solenoid valve */
    TIM_TIMERCFG_Type timerCfg;
    initTimeStruct();
    RTC_TIME_Type* watertime = malloc(sizeof(RTC_TIME_Type));
    uint8 fed = 0;
    uint8 watered = 0;
    watertime->HOUR = 5;
    watertime->MIN = 0;

    //Initialize timer0 for delays
    TIM_ConfigStructInit(TIM_TIMER_MODE, &timerCfg);   /* initialize timer config struct */
    TIM_Init(LPC_TIM0, TIM_TIMER_MODE, &timerCfg);     /* initialize timer0 */

    //Initialize Real Time Clock
    RTC_Init(LPC_RTC);
    RTC_Cmd(LPC_RTC, ENABLE);
    RTC_ResetClockTickCounter(LPC_RTC);

    // Initialize Peripherals
    INIT_SDRAM();             /* initialize SDRAM */
    servoInit();              /* initialize FSR servo motor for panning camera */
    initStepper();            /* initialize stepper motor for dispensing food */
    initFSR();                /* initialize force sensitive resistor circuit for food and water full signals */
    initWiFi(AUTO_CONNECT);   /* initialize WiFi module -- must be attached */
    audio_initialize();
    audio_reset();
    //audio_test();
    audio_setupMP3();

    int i = 0, retval;
    uint32 length;            /* length variable for photo */
    printf("Entering while loop\n\r");
    //audio_storeVoice();

    // Enter an infinite loop
    while(1)
    {
        if(STATE == DISPENSING_FOOD){
            printf("Entering food dispense state\n\r");
            /* Execute commands to dispense food */
            //spinUntilFull();
            spinStepper(300);
            reverseSpin(250);
            STATE = CONNECTED;
        }
        if(STATE == DISPENSING_WATER){
            printf("Entering water dispense state\n\r");
            /* Execute commands to dispense water */
            fillWater();
            STATE = CONNECTED;
        }
        if(STATE == CAPTURING){
            printf("Entering camera taking state\n\r");
            /* Initialize camera and set it up to take a picture */
            if(cameraInit())
                printf("Camera not initialized!\n\r");
            retval = stopFrame();
            length = getBufferLength();
            printf("length: %i\n\r", length);

            /* Send length to Android application, one decimal digit at a time,
               least significant digit first */
            int temp_len = length;
            while(temp_len){
                uart1PutChar(temp_len % 10);
                temp_len = temp_len / 10;
            }

            /* Send photo and finish set up */
            getAndSendPhoto(length);
            resumeFrame();
            STATE = CONNECTED;
        }
        if(STATE == TALKING1){
            audio_playVoice(1);
            STATE = CONNECTED;
        }
        if(STATE == TALKING2){
            audio_playVoice(2);
            STATE = CONNECTED;
        }
        if(STATE == TALKING3){
            audio_playVoice(3);
            STATE = CONNECTED;
        }
        if(STATE == PAN_LEFT){
            /* Execute commands to pan servo left */
            panServo(LEFT);
            STATE = CONNECTED;
        }
        if(STATE == PAN_RIGHT){
            /* Execute commands to pan servo right */
            panServo(RIGHT);
            STATE = CONNECTED;
        }
        if(STATE == SCHEDULING){
            /* Execute commands to schedule a feeding time */
            STATE = CONNECTED;
        }

        /* Scheduling */
        RTC_GetFullTime(LPC_RTC, time);

        //Fill water bowl at predetermined time
        if (time->HOUR == watertime->HOUR + 1 && watered == 1)
            watered = 0;
        if (watertime->HOUR == time->HOUR && watertime->MIN < time->MIN && watered == 0)
        {
            fillWater();
            watered = 1;
        }

        //Feed dog on schedule, if any; do not feed in two consecutive hours
        for(i = 0; i < scheduled_feeds; i++)
        {
            if (time->HOUR == feedtime[i]->HOUR + 1 && fed == 1)
                fed = 0;
            if (feedtime[i]->HOUR == time->HOUR && feedtime[i]->MIN < time->MIN && fed == 0)
            {
                spinUntilFull();
                fed = 1;
            }
        }
    }

    return 0;
}
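/*
 * Helper sketch that captures the scheduling rule used in the main loop above:
 * an event fires once when the current time is inside the scheduled hour and
 * past the scheduled minute, and it re-arms one hour after the scheduled hour.
 * The RTC_TIME_Type fields mirror the code above; the helper itself is
 * illustrative, not part of the original firmware.
 *
 * Example use: if (scheduleDue(time, watertime, &watered)) fillWater();
 */
static int scheduleDue(const RTC_TIME_Type *now, const RTC_TIME_Type *when, uint8 *done)
{
    /* Re-arm once the hour after the scheduled hour is reached */
    if (now->HOUR == when->HOUR + 1 && *done == 1)
        *done = 0;

    /* Fire once when inside the scheduled hour and past the scheduled minute */
    if (when->HOUR == now->HOUR && when->MIN < now->MIN && *done == 0) {
        *done = 1;
        return 1;
    }
    return 0;
}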
JNIEXPORT jint JNICALL Java_com_example_enzocamtest_CamView_startCamera(JNIEnv* env,
        jobject thiz, jstring deviceName, jint width, jint height)
{
    int ret = 0;
    const char* dev_name = (*env)->GetStringUTFChars(env, deviceName, 0);

    /* Initialize all the structures we will be using */
    mjpgDec = (struct decoderInstance *)calloc(1, sizeof(struct decoderInstance));
    usbCam = (struct cameraInstance *)calloc(1, sizeof(struct cameraInstance));
    camData = (struct mediaBuffer *)calloc(1, sizeof(struct mediaBuffer));
    yuvData = (struct mediaBuffer *)calloc(1, sizeof(struct mediaBuffer));
    y422_buf = (struct g2d_buf *)calloc(1, sizeof(struct g2d_buf));

    /* Set properties for MJPG decoder */
    mjpgDec->type = MJPEG;

    /* Set properties for USB camera */
    usbCam->type = MJPEG;
    usbCam->width = width;
    usbCam->height = height;
    usbCam->fps = FPS;
    strcpy(usbCam->deviceName, dev_name);
    (*env)->ReleaseStringUTFChars(env, deviceName, dev_name);   /* release JNI string copy */

    /* Init the VPU. This must be done before a codec can be used.
       If this fails, we need to bail. */
    ret = vpuInit();
    if (ret < 0)
        return -1;

    if (cameraInit(usbCam) < 0)
        ret = -1;

    /* In order to init the MJPG decoder, it must be supplied with a bitstream to parse */
    ret = cameraGetFrame(usbCam, camData);
    if (ret < 0) {
        err_msg("Could not get camera frame\n");
        ret = -1;
    }

    if (decoderInit(mjpgDec, camData) < 0) {
        err_msg("Could not init MJPG decoder\n");
        ret = -1;
    }

    y420_buf = g2d_alloc(width * height * 2, 0);
    rgb_buf = g2d_alloc(width * height * 2, 0);

    rgb_surf.planes[0] = rgb_buf->buf_paddr;
    rgb_surf.left = 0;
    rgb_surf.top = 0;
    rgb_surf.right = width;
    rgb_surf.bottom = height;
    rgb_surf.stride = width;
    rgb_surf.width = width;
    rgb_surf.height = height;
    rgb_surf.rot = G2D_ROTATION_0;
    rgb_surf.format = G2D_RGB565;

    y420_surf.planes[0] = y420_buf->buf_paddr;
    y420_surf.planes[1] = y420_surf.planes[0] + width*height;
    y420_surf.planes[2] = y420_surf.planes[1] + width*height/4;
    y420_surf.left = 0;
    y420_surf.top = 0;
    y420_surf.right = width;
    y420_surf.bottom = height;
    y420_surf.stride = width;
    y420_surf.width = width;
    y420_surf.height = height;
    y420_surf.rot = G2D_ROTATION_0;
    y420_surf.format = G2D_I420;

    info_msg("Finished setting up JNI codec and camera!\n");

    return ret;
}
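/*
 * Sketch of a matching stopCamera() teardown for the resources allocated in
 * startCamera(). decoderDeinit(), cameraDeinit() and vpuDeinit() are assumed
 * counterparts to the init calls above and may be named differently in the
 * actual project; g2d_free() is the documented pair of g2d_alloc().
 */
JNIEXPORT void JNICALL Java_com_example_enzocamtest_CamView_stopCamera(JNIEnv* env, jobject thiz)
{
    /* Release the G2D surfaces first, then tear the codec and camera down */
    if (rgb_buf)
        g2d_free(rgb_buf);
    if (y420_buf)
        g2d_free(y420_buf);

    decoderDeinit(mjpgDec);   /* assumed counterpart to decoderInit() */
    cameraDeinit(usbCam);     /* assumed counterpart to cameraInit() */
    vpuDeinit();              /* assumed counterpart to vpuInit() */

    /* Free the heap-allocated bookkeeping structures */
    free(yuvData);
    free(camData);
    free(usbCam);
    free(mjpgDec);
    free(y422_buf);
}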