camera_memory_t* V4L2Camera::GrabJpegFrame(camera_request_memory mRequestMemory)
{
    int ret;

    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
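
    /*
     * Streaming I/O with memory-mapped buffers: VIDIOC_DQBUF pulls a
     * filled frame off the driver's outgoing queue, and VIDIOC_QBUF
     * hands the buffer back for reuse once the mmap'd data is consumed.
     */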

    /* Dequeue buffer */
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        ALOGE("GrabJpegFrame: VIDIOC_DQBUF Failed");
        return NULL;
    }
    nDequeued++;

    ALOGI("GrabJpegFrame: Generated a frame from capture device");

    /*
     * Encode the JPEG before re-queueing the buffer: once VIDIOC_QBUF
     * hands the buffer back, the driver is free to overwrite the mmap'd
     * memory with the next captured frame.
     */
    size_t bytesused = videoIn->buf.bytesused;
    camera_memory_t* picture = NULL;

    char *tmpBuf = new (std::nothrow) char[bytesused]; /* nothrow: HAL code builds without exceptions; needs <new> */
    if (tmpBuf != NULL) {
        MemoryStream strm(tmpBuf, bytesused);
        saveYUYVtoJPEG((unsigned char *)videoIn->mem[videoIn->buf.index],
                       videoIn->width, videoIn->height, strm, 100);
        strm.closeStream();
        size_t fileSize = strm.getOffset();

        picture = mRequestMemory(-1, fileSize, 1, NULL);
        if (picture != NULL)
            memcpy(picture->data, tmpBuf, fileSize);
        delete[] tmpBuf;
    }

    /* Enqueue buffer */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        ALOGE("GrabJpegFrame: VIDIOC_QBUF Failed");
        if (picture != NULL)
            picture->release(picture);
        return NULL;
    }
    nQueued++;

    return picture;
}
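
/*
 * A minimal sketch of how GrabJpegFrame() might be consumed from a
 * take-picture path. pictureThread() is a hypothetical helper, not part
 * of the original source; mCamera, mRequestMemory, mMsgEnabled, mDataCb
 * and mCallbackCookie mirror the members used by previewThread() below.
 * It illustrates the ownership contract: memory obtained through
 * mRequestMemory is released by the HAL after the framework callback
 * returns.
 */
int CameraHardware::pictureThread()
{
    camera_memory_t *jpeg = mCamera->GrabJpegFrame(mRequestMemory);
    if (jpeg == NULL)
        return UNKNOWN_ERROR;

    if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)
        mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpeg, 0, NULL, mCallbackCookie);

    /* The HAL allocated this buffer, so the HAL frees it. */
    jpeg->release(jpeg);
    return NO_ERROR;
}
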
int CameraHardware::previewThread()
{
    int width, height;
    int err;
    IMG_native_handle_t** hndl2hndl;
    int stride;
    mParameters.getPreviewSize(&width, &height);
    int framesize = width * height * 2; // RGB565 output: 2 bytes per pixel

    if (!previewStopped) {

        mLock.lock();

        if (mNativeWindow != NULL) {
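            /*
             * Preview cycle: dequeue a gralloc buffer from the preview
             * window, fill it with the converted camera frame, then
             * enqueue it for display.
             */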

            if ((err = mNativeWindow->dequeue_buffer(mNativeWindow, (buffer_handle_t**) &hndl2hndl, &stride)) != 0) {
                ALOGW("Surface::dequeueBuffer returned error %d", err);
                mLock.unlock();
                return -1;
            }

            mNativeWindow->lock_buffer(mNativeWindow, (buffer_handle_t*) hndl2hndl);
            GraphicBufferMapper &mapper = GraphicBufferMapper::get();
            Rect bounds(width, height);

            void *tempbuf;
            void *dst;

            if (0 == mapper.lock((buffer_handle_t)*hndl2hndl, CAMHAL_GRALLOC_USAGE, bounds, &dst))
            {
                /* dst now points at the window buffer's pixels; convert the
                   raw YUYV preview frame straight into it. */
                tempbuf = mCamera->GrabPreviewFrame();
                convertYUYVtoRGB565((unsigned char *)tempbuf, (unsigned char *)dst, width, height);
                mapper.unlock((buffer_handle_t)*hndl2hndl);

                mNativeWindow->enqueue_buffer(mNativeWindow,(buffer_handle_t*) hndl2hndl);

                if ((mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) ||
                    (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {

                    camera_memory_t* picture = mRequestMemory(-1, framesize, 1, NULL);
                    if (picture != NULL) {
                        convertYUYVtoRGB565((unsigned char *)tempbuf, (unsigned char *)picture->data, width, height);

                        if ((mMsgEnabled & CAMERA_MSG_VIDEO_FRAME) && mRecordingEnabled) {
                            nsecs_t timeStamp = systemTime(SYSTEM_TIME_MONOTONIC);
                            //mTimestampFn(timeStamp, CAMERA_MSG_VIDEO_FRAME, mRecordBuffer, mUser);
                        }
                        mDataCb(CAMERA_MSG_PREVIEW_FRAME, picture, 0, NULL, mCallbackCookie);
                        /* Memory from mRequestMemory is owned by the HAL;
                           release it after the framework callback returns. */
                        picture->release(picture);
                    }
                }

                mCamera->ReleasePreviewFrame();
            } else {
                /* Could not map the buffer for CPU access; cancel it back
                   to the window's queue so buffers are not leaked. */
                mNativeWindow->cancel_buffer(mNativeWindow, (buffer_handle_t*) hndl2hndl);
            }
        }
        mLock.unlock();
    }

    return NO_ERROR;
}
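
/*
 * A minimal sketch of the YUYV -> RGB565 conversion used above, in case
 * convertYUYVtoRGB565() is not shown elsewhere. This is an illustrative
 * stand-in rather than the project's actual implementation: it assumes
 * packed YUYV 4:2:2 input (Y0 U Y1 V per pixel pair) and uses integer
 * BT.601 coefficients.
 */
static inline unsigned char clamp8(int v)
{
    return (unsigned char)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

static void convertYUYVtoRGB565_sketch(const unsigned char *src,
                                       unsigned char *dst,
                                       int width, int height)
{
    unsigned short *out = (unsigned short *)dst;

    /* Each iteration consumes one 4-byte YUYV macropixel (two pixels). */
    for (int i = 0; i < width * height / 2; i++) {
        int y0 = src[0] - 16, u = src[1] - 128;
        int y1 = src[2] - 16, v = src[3] - 128;
        src += 4;

        int ys[2] = { y0, y1 };
        for (int p = 0; p < 2; p++) {
            int c = 298 * ys[p];
            unsigned char r = clamp8((c + 409 * v + 128) >> 8);
            unsigned char g = clamp8((c - 100 * u - 208 * v + 128) >> 8);
            unsigned char b = clamp8((c + 516 * u + 128) >> 8);
            /* Pack into 5-6-5 RGB565. */
            *out++ = (unsigned short)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
        }
    }
}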