// Start moving the player to the left and face the sprite left.
// Does nothing when the player is already at the left world boundary.
void Player::flipLeft() {
  // Guard: at (or past) the left edge, ignore the request.
  if ( X() <= 0 ) {
    return;
  }
  velocityX(-velocity[0]); // negative X velocity: move left at base speed
  setFlip(1);              // mirror sprite horizontally (face left)
  setFlipUp(0);
}
// Start moving the player to the right and face the sprite right.
// Does nothing once the player's right side reaches the world boundary.
void Player::flipRight() {
  // Guard: clamp against the right edge, accounting for the sprite width.
  if ( X() >= worldWidth-playerWidth ) {
    return;
  }
  velocityX(velocity[0]); // positive X velocity: move right at base speed
  setFlip(0);             // un-mirror sprite (face right)
  setFlipUp(0);
}
// constructor VideoFFmpeg::VideoFFmpeg (HRESULT * hRslt) : VideoBase(), m_codec(NULL), m_formatCtx(NULL), m_codecCtx(NULL), m_frame(NULL), m_frameDeinterlaced(NULL), m_frameRGB(NULL), m_imgConvertCtx(NULL), m_deinterlace(false), m_preseek(0), m_videoStream(-1), m_baseFrameRate(25.0), m_lastFrame(-1), m_eof(false), m_externTime(false), m_curPosition(-1), m_startTime(0), m_captWidth(0), m_captHeight(0), m_captRate(0.f), m_isImage(false), m_isThreaded(false), m_isStreaming(false), m_stopThread(false), m_cacheStarted(false) { // set video format m_format = RGB24; // force flip because ffmpeg always return the image in the wrong orientation for texture setFlip(true); // construction is OK *hRslt = S_OK; BLI_listbase_clear(&m_thread); pthread_mutex_init(&m_cacheMutex, NULL); BLI_listbase_clear(&m_frameCacheFree); BLI_listbase_clear(&m_frameCacheBase); BLI_listbase_clear(&m_packetCacheFree); BLI_listbase_clear(&m_packetCacheBase); }
void Reflection::setFlipAsActiveAmbiguity() { setFlip(activeAmbiguity); }
int CameraHal::CapturePicture() { int image_width, image_height, preview_width, preview_height; int capture_len; unsigned long base, offset; #ifdef R3D4_CONVERT CColorConvert* pConvert; //class for image processing #endif struct v4l2_buffer buffer; // for VIDIOC_QUERYBUF and VIDIOC_QBUF struct v4l2_format format; //struct v4l2_buffer cfilledbuffer; // for VIDIOC_DQBUF struct v4l2_requestbuffers creqbuf; // for VIDIOC_REQBUFS and VIDIOC_STREAMON and VIDIOC_STREAMOFF sp<MemoryBase> mPictureBuffer; sp<MemoryBase> mFinalPictureBuffer; sp<MemoryHeapBase> mJPEGPictureHeap; sp<MemoryBase> mJPEGPictureMemBase; ssize_t newoffset; size_t newsize; mCaptureFlag = true; int jpegSize; void* outBuffer; int err, i; int err_cnt = 0; int exifDataSize = 0; int thumbnaiDataSize = 0; unsigned char* pExifBuf = new unsigned char[64*1024]; int twoSecondReviewMode = getTwoSecondReviewMode(); int orientation = getOrientation(); LOG_FUNCTION_NAME if (CameraSetFrameRate()) { LOGE("Error in setting Camera frame rate\n"); return -1; } LOGD("\n\n\n PICTURE NUMBER =%d\n\n\n",++pictureNumber); mParameters.getPictureSize(&image_width, &image_height); mParameters.getPreviewSize(&preview_width, &preview_height); LOGV("mCameraIndex = %d\n", mCameraIndex); LOGD("Picture Size: Width = %d \t Height = %d\n", image_width, image_height); /* set size & format of the video image */ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; format.fmt.pix.width = image_width; format.fmt.pix.height = image_height; if(mCamera_Mode == CAMERA_MODE_JPEG) { format.fmt.pix.pixelformat = PIXEL_FORMAT_JPEG; capture_len = GetJPEG_Capture_Width() * GetJPEG_Capture_Height() * JPG_BYTES_PER_PIXEL; } else { format.fmt.pix.pixelformat = PIXEL_FORMAT; capture_len = image_width * image_height * UYV_BYTES_PER_PIXEL; } // round up to 4096 bytes if (capture_len & 0xfff) capture_len = (capture_len & 0xfffff000) + 0x1000; LOGV("capture: %s mode, pictureFrameSize = 0x%x = %d\n", (mCamera_Mode == CAMERA_MODE_JPEG)?"jpeg":"yuv", capture_len, 
capture_len); mPictureHeap = new MemoryHeapBase(capture_len); base = (unsigned long)mPictureHeap->getBase(); base = (base + 0xfff) & 0xfffff000; offset = base - (unsigned long)mPictureHeap->getBase(); // set capture format if (ioctl(camera_device, VIDIOC_S_FMT, &format) < 0) { LOGE ("Failed to set VIDIOC_S_FMT.\n"); return -1; } #if OMAP_SCALE if(mCameraIndex == VGA_CAMERA && mCamMode != VT_MODE) if(orientation == 0 || orientation == 180) setFlip(CAMERA_FLIP_MIRROR); #endif /* Shutter CallBack */ if(mMsgEnabled & CAMERA_MSG_SHUTTER) { mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); } /* Check if the camera driver can accept 1 buffer */ creqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; creqbuf.memory = V4L2_MEMORY_USERPTR; creqbuf.count = 1; if (ioctl(camera_device, VIDIOC_REQBUFS, &creqbuf) < 0) { LOGE ("VIDIOC_REQBUFS Failed. errno = %d\n", errno); return -1; } buffer.type = creqbuf.type; buffer.memory = creqbuf.memory; buffer.index = 0; if (ioctl(camera_device, VIDIOC_QUERYBUF, &buffer) < 0) { LOGE("VIDIOC_QUERYBUF Failed"); return -1; } buffer.m.userptr = base; mPictureBuffer = new MemoryBase(mPictureHeap, offset, buffer.length); LOGD("Picture Buffer: Base = %p Offset = 0x%x\n", (void *)base, (unsigned int)offset); if (ioctl(camera_device, VIDIOC_QBUF, &buffer) < 0) { LOGE("CAMERA VIDIOC_QBUF Failed"); return -1; } /* turn on streaming */ if (ioctl(camera_device, VIDIOC_STREAMON, &creqbuf.type) < 0) { LOGE("VIDIOC_STREAMON Failed\n"); return -1; } LOGD("De-queue the next avaliable buffer\n"); /* De-queue the next avaliable buffer */ //try to get buffer from camearo for 10 times while (ioctl(camera_device, VIDIOC_DQBUF, &buffer) < 0) { LOGE("VIDIOC_DQBUF Failed cnt = %d\n", err_cnt); if(err_cnt++ > 10) { mNotifyCb(CAMERA_MSG_ERROR, CAMERA_DEVICE_ERROR_FOR_RESTART, 0, mCallbackCookie); mPictureBuffer.clear(); mPictureHeap.clear(); return NO_ERROR; } } PPM("AFTER CAPTURE YUV IMAGE\n"); /* turn off streaming */ if (ioctl(camera_device, VIDIOC_STREAMOFF, 
&creqbuf.type) < 0) { LOGE("VIDIOC_STREAMON Failed\n"); return -1; } #if OMAP_SCALE if(mCameraIndex == VGA_CAMERA && mCamMode != VT_MODE) if(orientation == 0 || orientation == 180) setFlip(CAMERA_FLIP_NONE); #endif // camera returns processed jpeg image if(mCamera_Mode == CAMERA_MODE_JPEG) { int JPEG_Image_Size = GetJpegImageSize(); int thumbNailOffset = 0; //m4mo doesnt store offset ? int yuvOffset =0; //m4mo doesnt store yuv image ? // int thumbNailOffset = GetThumbNailOffset(); // int yuvOffset = GetYUVOffset(); thumbnaiDataSize = GetThumbNailDataSize(); sp<IMemoryHeap> heap = mPictureBuffer->getMemory(&newoffset, &newsize); uint8_t* pInJPEGDataBUuf = (uint8_t *)heap->base() + newoffset ; //ptr to jpeg data uint8_t* pInThumbNailDataBuf = (uint8_t *)heap->base() + thumbNailOffset; //ptr to thmubnail uint8_t* pYUVDataBuf = (uint8_t *)heap->base() + yuvOffset; // FILE* fOut = NULL; // fOut = fopen("/dump/dump.jpg", "w"); // fwrite(pInJPEGDataBUuf, 1, JPEG_Image_Size, fOut); // fclose(fOut); CreateExif(pInThumbNailDataBuf, thumbnaiDataSize, pExifBuf, exifDataSize, EXIF_SET_JPEG_LENGTH); //create a new binder object mFinalPictureHeap = new MemoryHeapBase(exifDataSize+JPEG_Image_Size); mFinalPictureBuffer = new MemoryBase(mFinalPictureHeap,0,exifDataSize+JPEG_Image_Size); heap = mFinalPictureBuffer->getMemory(&newoffset, &newsize); uint8_t* pOutFinalJpegDataBuf = (uint8_t *)heap->base(); //create a new binder obj to send yuv data if(yuvOffset) { int mFrameSizeConvert = (preview_width*preview_height*3/2) ; mYUVPictureHeap = new MemoryHeapBase(mFrameSizeConvert); mYUVPictureBuffer = new MemoryBase(mYUVPictureHeap,0,mFrameSizeConvert); mYUVNewheap = mYUVPictureBuffer->getMemory(&newoffset, &newsize); PPM("YUV COLOR CONVERSION STARTED\n"); #ifdef NEON Neon_Convert_yuv422_to_NV21((uint8_t *)pYUVDataBuf, (uint8_t *)mYUVNewheap->base(), mPreviewWidth, mPreviewHeight); PPM("YUV COLOR CONVERSION ENDED\n"); if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE) { mDataCb(CAMERA_MSG_RAW_IMAGE, 
mYUVPictureBuffer, mCallbackCookie); } #else if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE) mDataCb(CAMERA_MSG_RAW_IMAGE, pYUVDataBuf, mCallbackCookie); #endif } //create final JPEG with EXIF into that int OutJpegSize = 0; if(!CreateJpegWithExif( pInJPEGDataBUuf, JPEG_Image_Size, pExifBuf, exifDataSize, pOutFinalJpegDataBuf, OutJpegSize)) { LOGE("createJpegWithExif fail!!\n"); return -1; } if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mFinalPictureBuffer, mCallbackCookie); } } //CAMERA_MODE_JPEG // camera returns 16 bit uyv image // -> needs to process (rotate/flip) // -> and compess to jpeg (with dsp) if(mCamera_Mode == CAMERA_MODE_YUV) { #ifdef HARDWARE_OMX // create new buffer for image processing int mFrameSizeConvert = (image_width*image_height*2) ; mYUVPictureHeap = new MemoryHeapBase(mFrameSizeConvert); mYUVPictureBuffer = new MemoryBase(mYUVPictureHeap,0,mFrameSizeConvert); mYUVNewheap = mYUVPictureBuffer->getMemory(&newoffset, &newsize); // buffer from v4l holding the actual image uint8_t *pYuvBuffer = (uint8_t*)buffer.m.userptr; LOGD("PictureThread: generated a picture, pYuvBuffer=%p yuv_len=%d\n", pYuvBuffer, capture_len); PPM("YUV COLOR ROTATION STARTED\n"); #ifdef R3D4_CONVERT if(mCameraIndex == VGA_CAMERA) { LOGV("use rotation"); // color converter and image processing (flip/rotate) // neon lib doesnt seem to work, jpeg was corrupted? 
// so use own stuff pConvert = new CColorConvert(pYuvBuffer, image_width, image_height, UYV2); //pConvert->writeFile(DUMP_PATH "before_rotate.uyv", SOURCE); //pConvert->writeFile(DUMP_PATH "before_rotate.bmp", BMP); if(mCameraIndex == VGA_CAMERA ) pConvert->rotateImage(ROTATE_270); // else // pConvert->flipImage(FLIP_VERTICAL); // write rotatet image back to input buffer //pConvert->writeFile(DUMP_PATH "after_rotate.bmp", BMP); pConvert->makeUYV2(NULL, INPLACE); //INPLACE: no new buffer, write to input buffer image_width = pConvert->getWidth(); image_height = pConvert->geHeight(); } #else #endif PPM("YUV COLOR ROTATION Done\n"); //pYuvBuffer: [Reused]Output buffer with YUV 420P 270 degree rotated. if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE) { // convert pYuvBuffer(YUV422I) to mYUVPictureBuffer(YUV420P) Neon_Convert_yuv422_to_YUV420P(pYuvBuffer, (uint8_t *)mYUVNewheap->base(), image_width, image_height); mDataCb(CAMERA_MSG_RAW_IMAGE, mYUVPictureBuffer, mCallbackCookie); } #endif //HARDWARE_OMX if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { #ifdef HARDWARE_OMX // int inputFormat = PIX_YUV420P; // int imputSize = image_width * image_height * PIX_YUV420P_BYTES_PER_PIXEL; int inputFormat = PIX_YUV422I; int inputSize = image_width * image_height * PIX_YUV422I_BYTES_PER_PIXEL; int jpegSize = image_width * image_height * JPG_BYTES_PER_PIXEL; CreateExif(NULL, 0, pExifBuf, exifDataSize, EXIF_NOTSET_JPEG_LENGTH); HAL_PRINT("VGA EXIF size : %d\n", exifDataSize); mJPEGPictureHeap = new MemoryHeapBase(jpegSize + 256); outBuffer = (void *)((unsigned long)(mJPEGPictureHeap->getBase()) + 128); HAL_PRINT("YUV capture : outbuffer = 0x%x, jpegSize = %d, pYuvBuffer = 0x%x, yuv_len = %d, image_width = %d, image_height = %d, quality = %d, mippMode =%d\n", outBuffer, jpegSize, pYuvBuffer, capture_len, image_width, image_height, mYcbcrQuality, mippMode); if(jpegEncoder) { PPM("BEFORE JPEG Encode Image\n"); err = jpegEncoder->encodeImage( outBuffer, // void* outputBuffer, jpegSize, // int 
outBuffSize, pYuvBuffer, // void *inputBuffer, inputSize, // int inBuffSize, pExifBuf, // unsigned char* pExifBuf, exifDataSize, // int ExifSize, image_width, // int width, image_height, // int height, mThumbnailWidth, // int ThumbWidth, mThumbnailHeight, // int ThumbHeight, mYcbcrQuality, // int quality, inputFormat); // int isPixelFmt420p) PPM("AFTER JPEG Encode Image\n"); LOGD("JPEG ENCODE END\n"); if(err != true) { LOGE("Jpeg encode failed!!\n"); return -1; } else LOGD("Jpeg encode success!!\n"); } mJPEGPictureMemBase = new MemoryBase(mJPEGPictureHeap, 128, jpegEncoder->jpegSize); if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJPEGPictureMemBase, mCallbackCookie); } mJPEGPictureMemBase.clear(); mJPEGPictureHeap.clear(); #endif //HARDWARE_OMX }//END of CAMERA_MSG_COMPRESSED_IMAGE #ifdef R3D4_CONVERT delete pConvert; #endif }//END of CAMERA_MODE_YUV mPictureBuffer.clear(); mPictureHeap.clear(); if(mCamera_Mode == CAMERA_MODE_JPEG) { mFinalPictureBuffer.clear(); mFinalPictureHeap.clear(); } mYUVPictureBuffer.clear(); mYUVPictureHeap.clear(); delete []pExifBuf; mCaptureFlag = false; LOG_FUNCTION_NAME_EXIT return NO_ERROR; }