EXPORT_API bool arwGetVideoParamsStereo(int *widthL, int *heightL, int *pixelSizeL, char *pixelFormatStringBufferL, int pixelFormatStringBufferLenL, int *widthR, int *heightR, int *pixelSizeR, char *pixelFormatStringBufferR, int pixelFormatStringBufferLenR)
{
    AR_PIXEL_FORMAT pfL, pfR;
    
    if (!gARTK) return false;
    if (!gARTK->videoParameters(0, widthL, heightL, &pfL)) return false;
    if (!gARTK->videoParameters(1, widthR, heightR, &pfR)) return false;
    if (pixelSizeL) *pixelSizeL = arUtilGetPixelSize(pfL);
    if (pixelSizeR) *pixelSizeR = arUtilGetPixelSize(pfR);
    if (pixelFormatStringBufferL && pixelFormatStringBufferLenL > 0) {
        strncpy(pixelFormatStringBufferL, arUtilGetPixelFormatName(pfL), pixelFormatStringBufferLenL);
        pixelFormatStringBufferL[pixelFormatStringBufferLenL - 1] = '\0'; // guarantee nul termination.
    }
    if (pixelFormatStringBufferR && pixelFormatStringBufferLenR > 0) {
        strncpy(pixelFormatStringBufferR, arUtilGetPixelFormatName(pfR), pixelFormatStringBufferLenR);
        pixelFormatStringBufferR[pixelFormatStringBufferLenR - 1] = '\0'; // guarantee nul termination.
    }
    return true;
}
EXPORT_API bool arwGetVideoParams(int *width, int *height, int *pixelSize, char *pixelFormatStringBuffer, int pixelFormatStringBufferLen)
{
    AR_PIXEL_FORMAT pf;
    
    if (!gARTK) return false;
    if (!gARTK->videoParameters(0, width, height, &pf)) return false;
    if (pixelSize) *pixelSize = arUtilGetPixelSize(pf);
    if (pixelFormatStringBuffer && pixelFormatStringBufferLen > 0) {
        strncpy(pixelFormatStringBuffer, arUtilGetPixelFormatName(pf), pixelFormatStringBufferLen);
        pixelFormatStringBuffer[pixelFormatStringBufferLen - 1] = '\0'; // guarantee nul termination.
    }
    return true;
}
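A minimal, hedged usage sketch for the mono query (the stereo variant follows the same per-eye pattern); it assumes the wrapper has already been initialised and video is running. The 64-byte buffer length is an arbitrary choice, not an API requirement:

#include <stdio.h>

// Hedged sketch: query the running video stream's parameters. The function
// truncates and nul-terminates the pixel format name to fit the given length.
void printVideoParams(void)
{
    int width, height, pixelSize;
    char formatName[64];
    if (arwGetVideoParams(&width, &height, &pixelSize, formatName, (int)sizeof(formatName))) {
        printf("Video is %dx%d, %d bytes/pixel (%s).\n", width, height, pixelSize, formatName);
    }
}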
Example #3
int arSetPixelFormat( ARHandle *handle, AR_PIXEL_FORMAT pixFormat )
{
    int monoFormat;
    
    if (handle == NULL) return (-1);
    if (pixFormat == handle->arPixelFormat) return (0);

    switch( pixFormat ) {
        case AR_PIXEL_FORMAT_RGB:
        case AR_PIXEL_FORMAT_BGR:
        case AR_PIXEL_FORMAT_RGBA:
        case AR_PIXEL_FORMAT_BGRA:
        case AR_PIXEL_FORMAT_ABGR:
        case AR_PIXEL_FORMAT_ARGB:
        case AR_PIXEL_FORMAT_2vuy:
        case AR_PIXEL_FORMAT_yuvs:
        case AR_PIXEL_FORMAT_RGB_565:
        case AR_PIXEL_FORMAT_RGBA_5551:
        case AR_PIXEL_FORMAT_RGBA_4444:
            monoFormat = FALSE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            monoFormat = TRUE;
            break;
        default:
            ARLOGe("Error: Unsupported pixel format (%d) requested.\n", pixFormat);
            return -1;
    }

    handle->arPixelFormat = pixFormat;
    handle->arPixelSize   = arUtilGetPixelSize(handle->arPixelFormat);
    
    // Update handle settings that depend on pixel format.
    // None.
    
    // If template matching, automatically switch to the most suitable template matching mode.
    if (monoFormat) {
        if (handle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR) handle->arPatternDetectionMode = AR_TEMPLATE_MATCHING_MONO;
        else if (handle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX) handle->arPatternDetectionMode = AR_TEMPLATE_MATCHING_MONO_AND_MATRIX;
    } else {
        if (handle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO) handle->arPatternDetectionMode = AR_TEMPLATE_MATCHING_COLOR;
        else if (handle->arPatternDetectionMode == AR_TEMPLATE_MATCHING_MONO_AND_MATRIX) handle->arPatternDetectionMode = AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX;
    }
    }
    
    return 0;
}
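A hedged illustration of the automatic detection-mode switch above, assuming a valid, already-initialised ARHandle and the standard arGetPatternDetectionMode() accessor:

// Hedged sketch: switching to a luma-only pixel format demotes colour
// template matching to mono template matching. 'handle' is assumed to be a
// valid ARHandle currently in AR_TEMPLATE_MATCHING_COLOR mode.
int mode;
arSetPixelFormat(handle, AR_PIXEL_FORMAT_MONO);
arGetPatternDetectionMode(handle, &mode);
// mode is now AR_TEMPLATE_MATCHING_MONO.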
Example #4
THREAD_HANDLE_T *trackingInitInit( KpmHandle *kpmHandle )
{
    TrackingInitHandle  *trackingInitHandle;
    THREAD_HANDLE_T     *threadHandle;

    if (!kpmHandle) {
        ARLOGe("trackingInitInit(): Error: NULL KpmHandle.\n");
        return (NULL);
    }
    
    trackingInitHandle = (TrackingInitHandle *)malloc(sizeof(TrackingInitHandle));
    if (trackingInitHandle == NULL) return NULL;
    trackingInitHandle->kpmHandle = kpmHandle;
    trackingInitHandle->imageSize = kpmHandle->xsize * kpmHandle->ysize * arUtilGetPixelSize(kpmHandle->pixFormat);
    trackingInitHandle->imagePtr  = (ARUint8 *)malloc(trackingInitHandle->imageSize);
    if (trackingInitHandle->imagePtr == NULL) { // Don't leak the handle if the image buffer allocation fails.
        free(trackingInitHandle);
        return NULL;
    }
    trackingInitHandle->flag      = 0;

    threadHandle = threadInit(0, trackingInitHandle, trackingInitMain);
    return threadHandle;
}
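A hedged sketch of how the returned thread handle might be driven, assuming the companion trackingInitStart()/trackingInitGetResult() helpers that usually accompany this function in ARToolKit's NFT sample code; treat their exact signatures as assumptions:

// Hedged sketch; trackingInitStart()/trackingInitGetResult() are assumed
// from ARToolKit's NFT examples, not confirmed by this snippet.
THREAD_HANDLE_T *initHandle = trackingInitInit(kpmHandle);
if (initHandle) {
    trackingInitStart(initHandle, frameLumaPtr); // Hand a copy of the current frame to the worker.
    // ... later, poll for a result:
    float trans[3][4];
    int page;
    if (trackingInitGetResult(initHandle, trans, &page) > 0) {
        // Page 'page' was detected with pose 'trans'.
    }
}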
bool ARToolKitVideoSource::open() {
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): called, opening ARToolKit video");
    
    if (deviceState != DEVICE_CLOSED) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error: device is already open, exiting returning false");
        return false;
    }

    // Open the video path
    gVid = ar2VideoOpen(videoConfiguration);
    if (!gVid) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): arVideoOpen unable to open connection to camera using configuration '%s', exiting returning false", videoConfiguration);
        return false;
    }

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Opened connection to camera using configuration '%s'", videoConfiguration);
    deviceState = DEVICE_OPEN;
    
    // Find the size of the video
    if (ar2VideoGetSize(gVid, &videoWidth, &videoHeight) < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get video size, calling close(), exiting returning false");
        this->close();
        return false;
    }

    // Get the format in which the camera is returning pixels
    pixelFormat = ar2VideoGetPixelFormat(gVid);
    if (pixelFormat < 0) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: unable to get pixel format, calling close(), exiting returning false");
        this->close();
        return false;
    }
    
    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Video %dx%d@%dBpp (%s)", videoWidth, videoHeight, arUtilGetPixelSize(pixelFormat), arUtilGetPixelFormatName(pixelFormat));

#ifndef _WINRT
    // Translate pixel format into OpenGL texture internal format, format, and type.
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_RGBA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_BGRA:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ABGR:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_ABGR_EXT;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_ARGB:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_BGRA;
#ifdef AR_BIG_ENDIAN
            glPixType = GL_UNSIGNED_INT_8_8_8_8_REV;
#else
            glPixType = GL_UNSIGNED_INT_8_8_8_8;
#endif
            break;
        case AR_PIXEL_FORMAT_BGR:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_BGR;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_MONO:
        case AR_PIXEL_FORMAT_420v:
        case AR_PIXEL_FORMAT_420f:
        case AR_PIXEL_FORMAT_NV21:
            glPixIntFormat = GL_LUMINANCE;
            glPixFormat = GL_LUMINANCE;
            glPixType = GL_UNSIGNED_BYTE;
            break;
        case AR_PIXEL_FORMAT_RGB_565:
            glPixIntFormat = GL_RGB;
            glPixFormat = GL_RGB;
            glPixType = GL_UNSIGNED_SHORT_5_6_5;
            break;
        case AR_PIXEL_FORMAT_RGBA_5551:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_5_5_5_1;
            break;
        case AR_PIXEL_FORMAT_RGBA_4444:
            glPixIntFormat = GL_RGBA;
            glPixFormat = GL_RGBA;
            glPixType = GL_UNSIGNED_SHORT_4_4_4_4;
            break;
        default:
            ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unsupported pixel format.\n");
            this->close();
            return false;
    }
#endif // !_WINRT

#if TARGET_PLATFORM_IOS
    // Tell arVideo what the typical focal distance will be. Note that this does NOT
    // change the actual focus, but on devices with non-fixed focus, it lets arVideo
    // choose a better set of camera parameters.
    ar2VideoSetParami(gVid, AR_VIDEO_PARAM_IOS_FOCUS, AR_VIDEO_IOS_FOCUS_0_3M); // Default is 0.3 metres. See <AR/sys/videoiPhone.h> for allowable values.
#endif
    
    // Load the camera parameters, resize for the window and init.
    ARParam cparam;
    // Prefer internal camera parameters.
    if (ar2VideoGetCParam(gVid, &cparam) == 0) {
        ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Using internal camera parameters.");
    } else {
        const char cparam_name_default[] = "camera_para.dat"; // Default name for the camera parameters.
        if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters from buffer, calling close(), exiting returning false");
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): Camera parameters loaded from buffer");
            }
        } else {
            if (arParamLoad((cameraParam ? cameraParam : cparam_name_default), 1, &cparam) < 0) {
                ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error-failed to load camera parameters %s, calling close(), exiting returning false",
                                   (cameraParam ? cameraParam : cparam_name_default));        
                this->close();
                return false;
            } else {
                ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open():Camera parameters loaded from %s", (cameraParam ? cameraParam : cparam_name_default));
            }
        }
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_ERROR, "*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): Error: failed to create camera parameters lookup table, calling close(), exiting returning false");
        this->close();
        return false;
    }

    int err = ar2VideoCapStart(gVid);
    if (err != 0) {
        if (err == -2) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error \"%d\" starting video (device unavailable), setting ARW_ERROR_DEVICE_UNAVAILABLE error state", err);
            setError(ARW_ERROR_DEVICE_UNAVAILABLE);
        } else {
            ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): error \"%d\" starting video capture", err);
        }
        ARController::logv(AR_LOG_LEVEL_ERROR, "ARWrap::ARToolKitVideoSource::open(): calling close(), exiting returning false");
        this->close();
        return false;
    }

    deviceState = DEVICE_RUNNING;

    ARController::logv(AR_LOG_LEVEL_DEBUG, "ARWrap::ARToolKitVideoSource::open(): exiting returning true, deviceState = DEVICE_RUNNING, video capture started");
    return true;
}
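For context, a hedged sketch of where the glPixIntFormat/glPixFormat/glPixType triple selected in open() typically ends up; the texture id here is an illustrative assumption, not code from this class:

// Hedged sketch: upload the most recent frame into an OpenGL texture using
// the format triple chosen above. 'texId' is an assumed, already-created texture.
glBindTexture(GL_TEXTURE_2D, texId);
glTexImage2D(GL_TEXTURE_2D, 0, glPixIntFormat, videoWidth, videoHeight, 0,
             glPixFormat, glPixType, frameBuffer);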
bool AndroidVideoSource::getVideoReadyAndroid2(const ARParam *cparam_p) {
    
    // Load camera parameters
    ARParam cparam;
    if (cparam_p) cparam = *cparam_p;
    else {
        ARController::logv("Unable to automatically determine camera parameters. Using supplied default.\n");
        if (cameraParam) {
            if (arParamLoad(cameraParam, 1, &cparam) < 0) {
                ARController::logv("Error: Unable to load camera parameters from file '%s'.", cameraParam);
                goto bail;
            }
        } else if (cameraParamBuffer) {
            if (arParamLoadFromBuffer(cameraParamBuffer, cameraParamBufferLen, &cparam) < 0) {
                ARController::logv("Error: Unable to load camera parameters from buffer.");
                goto bail;
            }
        } else {
            ARController::logv("Error: video source must be configured before opening.");
            goto bail;
        }
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv("*** Camera Parameter resized from %d, %d. ***", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv("Error: Failed to create camera parameters lookup table.");
        goto bail;
    }

    // Allocate local buffer for video frame after copy or conversion.
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        frameBufferSize = videoWidth * videoHeight + 2 * videoWidth/2 * videoHeight/2;
    } else {
        frameBufferSize = videoWidth * videoHeight * arUtilGetPixelSize(pixelFormat);
    }
    localFrameBuffer = (ARUint8 *)calloc(frameBufferSize, sizeof(ARUint8));
    if (!localFrameBuffer) {
        ARController::logv("Error: Unable to allocate memory for local video frame buffer.");
        goto bail;
    }
    frameBuffer = localFrameBuffer;
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        frameBuffer2 = localFrameBuffer + videoWidth*videoHeight;
    } else {
        frameBuffer2 = NULL;
    }

    ARController::logv("Android Video Source running %dx%d.", videoWidth, videoHeight);

    deviceState = DEVICE_RUNNING;
    return true;
    
bail:
    deviceState = DEVICE_OPEN;
    return false;
}
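As a quick sanity check on the YUV buffer-size formula used above: the Y plane holds videoWidth × videoHeight bytes, and the two half-resolution chroma planes together add another half of that, i.e. 1.5 bytes per pixel overall. Worked through for a hypothetical 640×480 frame:

// Worked example of the NV21/420f buffer-size formula for a 640x480 frame.
size_t luma   = 640 * 480;              // 307200 bytes (Y plane).
size_t chroma = 2 * (640/2) * (480/2);  // 153600 bytes (interleaved Cb/Cr).
size_t total  = luma + chroma;          // 460800 bytes == 640*480*1.5.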
Example #7
bool AndroidVideoSource::getVideoReadyAndroid2(const ARParam *cparam_p)
{
    ARParam cparam;

    if (cparam_p) {
        cparam = *cparam_p;
    } else {
        arParamClearWithFOVy(&cparam, videoWidth, videoHeight, M_PI_4); // M_PI_4 radians = 45 degrees.
        ARController::logv(AR_LOG_LEVEL_WARN, "Using default camera parameters for %dx%d image size, 45 degrees vertical field-of-view.", videoWidth, videoHeight);
    }

    if (cparam.xsize != videoWidth || cparam.ysize != videoHeight) {
#ifdef DEBUG
        ARController::logv(AR_LOG_LEVEL_WARN, "*** Camera Parameter resized from %d, %d. ***", cparam.xsize, cparam.ysize);
#endif
        arParamChangeSize(&cparam, videoWidth, videoHeight, &cparam);
    }
    if (!(cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET))) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Failed to create camera parameters lookup table.");
        goto bail;
    }

	// Allocate buffer for incoming video frame.
    incomingFrameRawBufferSize = videoWidth * videoHeight + 2 * videoWidth/2 * videoHeight/2;
    incomingFrameRawBuffer[0] = (unsigned char *)calloc(incomingFrameRawBufferSize, sizeof(unsigned char));
    incomingFrameRawBuffer[1] = (unsigned char *)calloc(incomingFrameRawBufferSize, sizeof(unsigned char));
    if (!incomingFrameRawBuffer[0] || !incomingFrameRawBuffer[1]) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for incoming frame raw buffer.");
        goto bail;
    }
    
    // Next, an AR2VideoBufferT.
    localFrameBuffer = (AR2VideoBufferT *)calloc(1, sizeof(AR2VideoBufferT));
    if (!localFrameBuffer) {
        ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for local video frame buffer");
        goto bail;
    }
    
    if (pixelFormat == AR_PIXEL_FORMAT_NV21 || pixelFormat == AR_PIXEL_FORMAT_420f) {
        localFrameBuffer->buff = incomingFrameRawBuffer[0];
        localFrameBuffer->buffLuma = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlaneCount = 2;
        localFrameBuffer->bufPlanes = (ARUint8 **)calloc(2, sizeof(ARUint8 *));
        if (!localFrameBuffer->bufPlanes) { // Guard the plane-pointer allocation too.
            ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for buffer plane pointers.");
            goto bail;
        }
        localFrameBuffer->bufPlanes[0] = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlanes[1] = incomingFrameRawBuffer[0] + videoWidth*videoHeight;
    } else {
        convertedFrameRawBufferSize = videoWidth * videoHeight * arUtilGetPixelSize(pixelFormat);
        convertedFrameRawBuffer[0] = (ARUint8 *)calloc(convertedFrameRawBufferSize, sizeof(ARUint8));
        convertedFrameRawBuffer[1] = (ARUint8 *)calloc(convertedFrameRawBufferSize, sizeof(ARUint8));
        if (!convertedFrameRawBuffer[0] || !convertedFrameRawBuffer[1]) {
            ARController::logv(AR_LOG_LEVEL_ERROR, "Error: Unable to allocate memory for converted video frame buffer.");
            goto bail;
        }
        localFrameBuffer->buff = convertedFrameRawBuffer[0];
        localFrameBuffer->buffLuma = incomingFrameRawBuffer[0];
        localFrameBuffer->bufPlaneCount = 0;
    }
    
    frameBuffer = localFrameBuffer;

    ARController::logv(AR_LOG_LEVEL_INFO, "Android Video Source running %dx%d.", videoWidth, videoHeight);

    deviceState = DEVICE_RUNNING;
    return true;
    
bail:
    if (localFrameBuffer) {
        free(localFrameBuffer->bufPlanes);
        free(localFrameBuffer);
        localFrameBuffer = NULL;
    }
    if (incomingFrameRawBuffer[0]) {
        free(incomingFrameRawBuffer[0]);
        incomingFrameRawBuffer[0] = NULL;
    }
    if (incomingFrameRawBuffer[1]) {
        free(incomingFrameRawBuffer[1]);
        incomingFrameRawBuffer[1] = NULL;
    }
    if (convertedFrameRawBuffer[0]) {
        free(convertedFrameRawBuffer[0]);
        convertedFrameRawBuffer[0] = NULL;
    }
    if (convertedFrameRawBuffer[1]) {
        free(convertedFrameRawBuffer[1]);
        convertedFrameRawBuffer[1] = NULL;
    }
    incomingFrameRawBufferSize = 0;
    convertedFrameRawBufferSize = 0;
    frameBuffer = NULL;
    
    deviceState = DEVICE_OPEN;
    return false;
}
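A hedged sketch of how a consumer might read the AR2VideoBufferT assembled above; the fillFlag check is the conventional ARToolKit idiom, and the surrounding usage is an illustrative assumption:

// Hedged sketch: reading the frame buffer set up by getVideoReadyAndroid2().
// For NV21/420f, bufPlanes[0] is the Y plane and bufPlanes[1] the interleaved
// chroma; otherwise 'buff' holds the converted full-colour pixels.
AR2VideoBufferT *frame = frameBuffer;
if (frame && frame->fillFlag) {
    unsigned char *luma   = frame->buffLuma; // Always the Y plane.
    unsigned char *pixels = frame->buff;     // Full frame (raw or converted).
    // ... hand 'luma' to the tracker, 'pixels' to the renderer.
}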
Example #8
bool VideoSource::updateTexture(Color* buffer) {

    static int lastFrameStamp = 0;

    if (!buffer) return false; // Sanity check.

    if (!frameBuffer) return false; // Check that a frame is actually available.

    // Extra check: don't update the array if the current frame is the same as the previous one.
    if (lastFrameStamp == frameStamp) return false;

    int pixelSize = arUtilGetPixelSize(pixelFormat);
    switch (pixelFormat) {
        case AR_PIXEL_FORMAT_BGRA:
        case AR_PIXEL_FORMAT_BGR:
            for (int y = 0; y < videoHeight; y++) {
                ARUint8 *inp = &frameBuffer[videoWidth*y*pixelSize];
                Color *outp = &buffer[videoWidth*y];
                for (int pixelsToGo = videoWidth; pixelsToGo > 0; pixelsToGo--) {
                    outp->b = (float)*(inp + 0) / 255.0f;
                    outp->g = (float)*(inp + 1) / 255.0f;
                    outp->r = (float)*(inp + 2) / 255.0f;
                    outp->a = 1.0f;
                    inp += pixelSize;
                    outp++;
                }
            }
            break;
        case AR_PIXEL_FORMAT_RGBA:
        case AR_PIXEL_FORMAT_RGB:
            for (int y = 0; y < videoHeight; y++) {
                ARUint8 *inp = &frameBuffer[videoWidth*y*pixelSize];
                Color *outp = &buffer[videoWidth*y];
                for (int pixelsToGo = videoWidth; pixelsToGo > 0; pixelsToGo--) {
                    outp->r = (float)*(inp + 0) / 255.0f;
                    outp->g = (float)*(inp + 1) / 255.0f;
                    outp->b = (float)*(inp + 2) / 255.0f;
                    outp->a = 1.0f;
                    inp += pixelSize;
                    outp++;
                }
            }
            break;
        case AR_PIXEL_FORMAT_ARGB:
            for (int y = 0; y < videoHeight; y++) {
                ARUint8 *inp = &frameBuffer[videoWidth*y*pixelSize];
                Color *outp = &buffer[videoWidth*y];
                for (int pixelsToGo = videoWidth; pixelsToGo > 0; pixelsToGo--) {
                    outp->r = (float)*(inp + 1) / 255.0f;
                    outp->g = (float)*(inp + 2) / 255.0f;
                    outp->b = (float)*(inp + 3) / 255.0f;
                    outp->a = 1.0f;
                    inp += pixelSize;
                    outp++;
                }
            }
            break;
        case AR_PIXEL_FORMAT_ABGR:
            for (int y = 0; y < videoHeight; y++) {
                ARUint8 *inp = &frameBuffer[videoWidth*y*pixelSize];
                Color *outp = &buffer[videoWidth*y];
                for (int pixelsToGo = videoWidth; pixelsToGo > 0; pixelsToGo--) {
                    outp->b = (float)*(inp + 1) / 255.0f;
                    outp->g = (float)*(inp + 2) / 255.0f;
                    outp->r = (float)*(inp + 3) / 255.0f;
                    outp->a = 1.0f;
                    inp += pixelSize;
                    outp++;
                }
            }
            break;
        case AR_PIXEL_FORMAT_MONO:
            for (int y = 0; y < videoHeight; y++) {
                ARUint8 *inp = &frameBuffer[videoWidth*y*pixelSize];
                Color *outp = &buffer[videoWidth*y];
                for (int pixelsToGo = videoWidth; pixelsToGo > 0; pixelsToGo--) {
                    outp->b = outp->g = outp->r = (float)*inp / 255.0f;
                    outp->a = 1.0f;
                    inp += pixelSize;
                    outp++;
                }
            }
            break;
        default:
            return false;
    }
    
    lastFrameStamp = frameStamp; // Record the new framestamp.
    return true;
}
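A hedged caller sketch, assuming Color is the four-float RGBA struct implied by the member accesses above and that the video dimensions are accessible to the caller:

// Hedged usage sketch: allocate one Color per pixel and refresh it each tick.
// 'videoSource', 'videoWidth', and 'videoHeight' are assumed to come from an
// already-running VideoSource instance.
Color *texture = new Color[videoWidth * videoHeight];
if (videoSource->updateTexture(texture)) {
    // A new frame was converted; upload 'texture' to the GPU here.
}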