void MSPrimitive::print(const char *file_) { MSBoolean fileOpen=MSFalse; MSBoolean open=MSTrue; if (outputMode()==Draw) { if (file_!=0) displayPrintFileName(file_); if ((open=displayPrintOpen(this))==MSTrue) { fileOpen=MSTrue; outputMode(Print); displayPrintXorigin(0); displayPrintYorigin(0); } } if (open==MSTrue) { redraw(); if (fileOpen==MSTrue) { displayPrintClose(); outputMode(Draw); } } }
// Incrementally decodes frame |frameIndex| from the given byte range into
// m_frameBufferCache[frameIndex]. Returns true once the frame completes,
// false while more data is needed, and false via setFailed() on error.
bool WEBPImageDecoder::decodeSingleFrame(const uint8_t* dataBytes, size_t dataSize, size_t frameIndex)
{
    if (failed())
        return false;

    ASSERT(isDecodedSizeAvailable());
    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    if (!m_decoder) {
        // Choose the libwebp output colorspace; straight (non-premultiplied)
        // alpha is requested by passing false to outputMode().
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if (colorTransform())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        // Decode directly into the frame's pixel store (external memory).
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // Refresh the output pointer before every update; NOTE(review): presumably
    // the frame's backing store can move between calls — confirm.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // Suspension is only acceptable while this frame's data is still
        // arriving; otherwise fall through and treat it as a decode failure.
        if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
            applyPostProcessing(frameIndex);
            return false;
        }
        // FALLTHROUGH
    default:
        clear();
        return setFailed();
    }
}
unsigned char dsInit() { gpio(); // P0_6 used as GPIO function outputMode(); // output mode pullUp(); // pull-up oneWireReset(); return oneWireCheck(); }
// Issues a 1-wire bus reset: drives the bus low for ~500us (the protocol
// requires at least 480us), then releases it high so slave devices can
// answer with a presence pulse. Statement order and delays are the timing
// protocol itself — do not reorder. Assumes ds_delay_us is in microseconds.
void oneWireReset()
{
  outputMode();
  outputHigh();
  ds_delay_us(50);
  outputLow(); // pull the 1-wire bus low
  ds_delay_us(500); // hold the bus low for the reset pulse, at least 480us
  outputHigh(); // release the bus (pull high) so devices can respond
  // delay_us(500); // wait-out remaining initialisation window.
}
// Shows or hides the value subwindow depending on the current alignment,
// then repositions it. Skipped entirely while printing.
void MSVGauge::drawSubWindows(void)
{
  if (outputMode() >= Print) return;
  if (valueAlignment() == MSNone)
  {
    valueWin()->unmap();
  }
  else
  {
    valueWin()->map();
    valueWin()->raise();
  }
  valueWin()->moveTo(x_end(), y_org());
}
// Decodes the WebP bitstream held in m_data. When onlySize is true, only the
// header is parsed (width/height, alpha/format flags); otherwise the single
// frame is incrementally decoded into m_frameBufferCache[0]. Returns true
// when the requested stage completed, false when more data is needed, and
// false via setFailed() on an unrecoverable error.
bool WEBPImageDecoder::decode(bool onlySize)
{
    if (failed())
        return false;

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum number of bytes required before the header can be parsed.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
#ifdef QCMS_WEBP_COLOR_CORRECTION
        // Use the demuxer so the format flags (alpha, ICC profile) are known.
        WebPData inputData = { dataBytes, dataSize };
        WebPDemuxState state;
        WebPDemuxer* demuxer = WebPDemuxPartial(&inputData, &state);
        if (!demuxer)
            return setFailed();
        width = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
        height = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
        m_formatFlags = WebPDemuxGetI(demuxer, WEBP_FF_FORMAT_FLAGS);
        m_hasAlpha = !!(m_formatFlags & ALPHA_FLAG);
        WebPDemuxDelete(demuxer);
        if (state <= WEBP_DEMUX_PARSING_HEADER)
            return false; // Header not fully parsed yet; wait for more data.
#elif (WEBP_DECODER_ABI_VERSION >= 0x0163)
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_hasAlpha = features.has_alpha;
#else
        // Earlier version won't be able to display WebP files with alpha.
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        m_hasAlpha = false;
#endif
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(m_hasAlpha);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_hasAlpha);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
        // Decode incrementally, straight into the frame's pixel buffer.
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(mode, output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        buffer.setStatus(ImageFrame::FrameComplete);
        clear();
        return true;
    case VP8_STATUS_SUSPENDED:
        // Partial decode: color-correct what is available and wait for more.
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            applyColorProfile(dataBytes, dataSize, buffer);
        return false;
    default:
        clear();
        return setFailed();
    }
}
// Decodes the WebP data in m_data into m_frameBufferCache[0]. When onlySize
// is true, decoding stops once the image dimensions are known. Returns true
// when the requested stage completed, false when more data is needed, and
// false via setFailed() on error.
bool WEBPImageDecoder::decode(bool onlySize)
{
    if (failed())
        return false;

#if defined(__LB_SHELL__)
    // We don't want progressive decoding: wait for the full byte stream.
    if (!isAllDataReceived())
        return false;
#endif

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum number of bytes required before the header can be parsed.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
#if (WEBP_DECODER_ABI_VERSION >= 0x0163)
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_hasAlpha = features.has_alpha;
#else
        // Earlier version won't be able to display WebP files with alpha.
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        m_hasAlpha = false;
#endif
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(m_hasAlpha);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    if (!m_decoder) {
        // Decode incrementally, straight into the frame's pixel buffer.
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(outputMode(m_hasAlpha), output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        buffer.setStatus(ImageFrame::FrameComplete);
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return true;
    case VP8_STATUS_SUSPENDED:
        // More input needed; keep the decoder alive for the next call.
        return false;
    default:
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return setFailed();
    }
}
bool WEBPImageDecoder::decodeSingleFrame(const uint8_t* dataBytes, size_t dataSize, size_t frameIndex) { if (failed()) return false; ASSERT(isDecodedSizeAvailable()); ASSERT(m_frameBufferCache.size() > frameIndex); ImageFrame& buffer = m_frameBufferCache[frameIndex]; ASSERT(buffer.getStatus() != ImageFrame::FrameComplete); if (buffer.getStatus() == ImageFrame::FrameEmpty) { if (!buffer.setSizeAndColorSpace(size().width(), size().height(), colorSpace())) return setFailed(); buffer.setStatus(ImageFrame::FramePartial); // The buffer is transparent outside the decoded area while the image is // loading. The correct alpha value for the frame will be set when it is // fully decoded. buffer.setHasAlpha(true); buffer.setOriginalFrameRect(IntRect(IntPoint(), size())); } const IntRect& frameRect = buffer.originalFrameRect(); if (!m_decoder) { WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG); if (!m_premultiplyAlpha) mode = outputMode(false); if (colorTransform()) { // Swizzling between RGBA and BGRA is zero cost in a color transform. // So when we have a color transform, we should decode to whatever is // easiest for libwebp, and then let the color transform swizzle if // necessary. // Lossy webp is encoded as YUV (so RGBA and BGRA are the same cost). // Lossless webp is encoded as BGRA. This means decoding to BGRA is // either faster or the same cost as RGBA. 
mode = MODE_BGRA; } WebPInitDecBuffer(&m_decoderBuffer); m_decoderBuffer.colorspace = mode; m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData); m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height(); m_decoderBuffer.is_external_memory = 1; m_decoder = WebPINewDecoder(&m_decoderBuffer); if (!m_decoder) return setFailed(); } m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y())); switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) { case VP8_STATUS_OK: applyPostProcessing(frameIndex); buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha); buffer.setStatus(ImageFrame::FrameComplete); clearDecoder(); return true; case VP8_STATUS_SUSPENDED: if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) { applyPostProcessing(frameIndex); return false; } // FALLTHROUGH default: clear(); return setFailed(); } }
static void parseCommandLine(Ref<ParseStream> cin, const FileName& path) { while (true) { std::string tag = cin->getString(); if (tag == "") return; /* parse command line parameters from a file */ if (tag == "-c") { FileName file = path + cin->getFileName(); parseCommandLine(new ParseStream(new LineCommentFilter(file,"#")),file.path()); } /* read model from file */ else if (tag == "-i") *g_scene += load(path+cin->getFileName()); /* triangulated sphere */ else if (tag == "-trisphere") { Ref<Device::RTShape> sphere = g_device->rtNewShape("sphere"); sphere->rtSetFloat3("P",cin->getVec3f()); sphere->rtSetFloat1("r",cin->getFloat()); sphere->rtSetInt1("numTheta",cin->getInt()); sphere->rtSetInt1("numPhi",cin->getInt()); sphere->rtCommit(); Ref<Device::RTMaterial> material = g_device->rtNewMaterial("matte"); material->rtSetFloat3("reflection",Col3f(1.0f,0.0f,0.0f)); material->rtCommit(); *g_scene += new ShapeNode(sphere,material); } /* ambient light source */ else if (tag == "-ambientlight") { Ref<Device::RTLight> light = g_device->rtNewLight("ambientlight"); light->rtSetFloat3("L",cin->getVec3f()); light->rtCommit(); *g_scene += new LightNode(light); } /* point light source */ else if (tag == "-pointlight") { Vec3f P = cin->getVec3f(); Vec3f I = cin->getVec3f(); std::cout << " P " << P.x << " : " << P.y << " : " << P.z << std::endl; std::cout << " I " << I.x << " : " << I.y << " : " << I.z << std::endl; Ref<Device::RTLight> light = g_device->rtNewLight("pointlight"); light->rtSetFloat3("P",P); light->rtSetFloat3("I",I); light->rtCommit(); *g_scene += new LightNode(light); } /* distant light source */ else if (tag == "-distantlight") { Ref<Device::RTLight> light = g_device->rtNewLight("distantlight"); light->rtSetFloat3("D",cin->getVec3f()); light->rtSetFloat3("L",cin->getVec3f()); light->rtSetFloat1("halfAngle",cin->getFloat()); light->rtCommit(); *g_scene += new LightNode(light); } /* triangular light source */ else if (tag == "-trianglelight") { Vec3f P = 
cin->getVec3f(); Vec3f U = cin->getVec3f(); Vec3f V = cin->getVec3f(); Vec3f L = cin->getVec3f(); Ref<Device::RTLight> light = g_device->rtNewLight("trianglelight"); light->rtSetFloat3("v0",P); light->rtSetFloat3("v1",P+U); light->rtSetFloat3("v2",P+V); light->rtSetFloat3("L" ,L); light->rtCommit(); *g_scene += new LightNode(light); } /* quad light source */ else if (tag == "-quadlight") { Vec3f P = cin->getVec3f(); Vec3f U = cin->getVec3f(); Vec3f V = cin->getVec3f(); Vec3f L = cin->getVec3f(); Ref<Device::RTLight> light0 = g_device->rtNewLight("trianglelight"); light0->rtSetFloat3("v0",P); light0->rtSetFloat3("v1",P+U); light0->rtSetFloat3("v2",P+U+V); light0->rtSetFloat3("L" ,L); light0->rtCommit(); *g_scene += new LightNode(light0); Ref<Device::RTLight> light1 = g_device->rtNewLight("trianglelight"); light1->rtSetFloat3("v0",P+U+V); light1->rtSetFloat3("v1",P+V); light1->rtSetFloat3("v2",P); light1->rtSetFloat3("L" ,L); light1->rtCommit(); *g_scene += new LightNode(light1); } /* HDRI light source */ else if (tag == "-hdrilight") { Ref<Device::RTLight> light = g_device->rtNewLight("hdrilight"); light->rtSetFloat3("L",cin->getVec3f()); light->rtSetImage("image",loadRTImage(path + cin->getFileName())); light->rtCommit(); *g_scene += new LightNode(light); } /* parse camera parameters */ else if (tag == "-vp") g_camPos = Vec3f(cin->getVec3f()); else if (tag == "-vi") g_camLookAt = Vec3f(cin->getVec3f()); else if (tag == "-vd") g_camLookAt = g_camPos+cin->getVec3f(); else if (tag == "-vu") g_camUp = cin->getVec3f(); else if (tag == "-angle") g_camAngle = cin->getFloat(); else if (tag == "-fov") g_camAngle = cin->getFloat(); else if (tag == "-radius") g_camRadius = cin->getFloat(); /* frame buffer size */ else if (tag == "-size") { g_width = cin->getInt(); g_height = cin->getInt(); g_frameBuffer = g_device->rtNewFrameBuffer("RGB_FLOAT32", g_width, g_height); } /* full screen mode */ else if (tag == "-fullscreen") { g_fullscreen = true; } /* refine rendering when not 
moving */ else if (tag == "-refine") g_refine = true; else if (tag == "-norefine") g_refine = false; /* acceleration structure to use */ else if (tag == "-accel") g_accel = cin->getString(); /* set renderer */ else if (tag == "-renderer") { std::string renderer = cin->getString(); if (renderer == "debug" ) g_renderer = parseDebugRenderer(cin,path); else if (renderer == "pt" ) g_renderer = parsePathTracer(cin,path); else if (renderer == "pathtracer") g_renderer = parsePathTracer(cin,path); else if (renderer == "material" ) g_renderer = parseMaterialRenderer(cin,path); else if (renderer == "opengl" ) g_renderer = parseOpenGLRenderer(cin); else throw std::runtime_error("unknown renderer: "+renderer); } /* set gamma */ else if (tag == "-gamma") { g_renderer->rtSetFloat1("gamma",g_gamma = cin->getFloat()); g_renderer->rtCommit(); } /* set recursion depth */ else if (tag == "-depth") { g_renderer->rtSetInt1("maxDepth",g_depth = cin->getInt()); g_renderer->rtCommit(); } /* set samples per pixel */ else if (tag == "-spp") { g_renderer->rtSetInt1("sampler.spp",g_spp = cin->getInt()); g_renderer->rtCommit(); } /* set the backplate */ else if (tag == "-backplate") { g_renderer->rtSetImage("backplate",g_backplate = loadRTImage(path + cin->getFileName())); g_renderer->rtCommit(); } /* render frame */ else if (tag == "-o") outputMode(path + cin->getFileName()); /* display image */ else if (tag == "-display") displayMode(); /* regression testing */ else if (tag == "-regression") { g_refine = false; g_regression = true; GLUTDisplay(OrthonormalSpace::lookAtPoint(g_camPos,g_camLookAt,g_camUp),0.01f); } else if (tag == "-version") { std::cout << "embree renderer version 1.0" << std::endl; exit(1); } else if (tag == "-h" || tag == "-?" 
|| tag == "-help" || tag == "--help") { std::cout << std::endl; std::cout << "Embree Version 1.0" << std::endl; std::cout << std::endl; std::cout << " usage: embree -i model.obj -renderer debug -display" << std::endl; std::cout << " embree -i model.obj -renderer pathtracer -o out.tga" << std::endl; std::cout << " embree -c model.ecs -display" << std::endl; std::cout << std::endl; std::cout << "-renderer [debug,pathtracer]" << std::endl; std::cout << " Sets the renderer to use." << std::endl; std::cout << std::endl; std::cout << "-c file" << std::endl; std::cout << " Parses command line parameters from file." << std::endl; std::cout << std::endl; std::cout << "-i file" << std::endl; std::cout << " Loads a scene from file." << std::endl; std::cout << std::endl; std::cout << "-o file" << std::endl; std::cout << " Renders and outputs the image to the file." << std::endl; std::cout << std::endl; std::cout << "-display" << std::endl; std::cout << " Interactively displays the rendering into a window." << std::endl; std::cout << std::endl; std::cout << "-vp x y z" << std::endl; std::cout << " Sets camera position to the location (x,y,z)." << std::endl; std::cout << std::endl; std::cout << "-vi x y z" << std::endl; std::cout << " Sets camera lookat point to the location (x,y,z)." << std::endl; std::cout << std::endl; std::cout << "-vd x y z" << std::endl; std::cout << " Sets camera viewing direction to (x,y,z)." << std::endl; std::cout << std::endl; std::cout << "-vu x y z" << std::endl; std::cout << " Sets camera up direction to (x,y,z)." << std::endl; std::cout << std::endl; std::cout << "-fov angle" << std::endl; std::cout << " Sets camera field of view in y direction to angle." << std::endl; std::cout << std::endl; std::cout << "-size width height" << std::endl; std::cout << " Sets the width and height of image to render." << std::endl; std::cout << std::endl; std::cout << "-fullscreen" << std::endl; std::cout << " Enables full screen display mode." 
<< std::endl; std::cout << std::endl; std::cout << "-accel [bvh2,bvh4,bvh4.spatial]" << std::endl; std::cout << " Sets the spatial index structure to use." << std::endl; std::cout << std::endl; std::cout << "-gamma v" << std::endl; std::cout << " Sets gamma correction to v (only pathtracer)." << std::endl; std::cout << std::endl; std::cout << "-depth i" << std::endl; std::cout << " Sets the recursion depth to i (default 16)" << std::endl; std::cout << std::endl; std::cout << "-spp i" << std::endl; std::cout << " Sets the number of samples per pixel to i (default 1) (only pathtracer)." << std::endl; std::cout << std::endl; std::cout << "-backplate" << std::endl; std::cout << " Sets a high resolution back ground image. (default none) (only pathtracer)." << std::endl; std::cout << std::endl; std::cout << "-ambientlight r g b" << std::endl; std::cout << " Creates an ambient light with intensity (r,g,b)." << std::endl; std::cout << std::endl; std::cout << "-pointlight px py pz r g b" << std::endl; std::cout << " Creates a point light with intensity (r,g,b) at position (px,py,pz)." << std::endl; std::cout << std::endl; std::cout << "-distantlight dx dy dz r g b halfAngle" << std::endl; std::cout << " Creates a distant sun light with intensity (r,g,b) shining into " << std::endl; std::cout << " direction (dx,dy,dz) from the cone spanned by halfAngle." << std::endl; std::cout << std::endl; std::cout << "-trianglelight px py pz ux uy uz vx vy vz r g b" << std::endl; std::cout << " Creates a triangle-light with intensity (r,g,b) spanned by the point " << std::endl; std::cout << " (px,py,pz) and the vectors (vx,vy,vz) and (ux,uy,uz)." << std::endl; std::cout << std::endl; std::cout << "-quadlight px py pz ux uy uz vx vy vz r g b" << std::endl; std::cout << " Creates a quad-light with intensity (r,g,b) spanned by the point " << std::endl; std::cout << " (px,py,pz) and the vectors (vx,vy,vz) and (ux,uy,uz)." 
<< std::endl; std::cout << std::endl; std::cout << "-hdrilight r g b file" << std::endl; std::cout << " Creates a high dynamic range environment light from the image " << std::endl; std::cout << " file. The intensities are multiplies by (r,g,b)." << std::endl; std::cout << std::endl; std::cout << "-trisphere px py pz r theta phi" << std::endl; std::cout << " Creates a triangulated sphere with radius r at location (px,py,pz) " << std::endl; std::cout << " and triangulation rates theta and phi." << std::endl; std::cout << std::endl; std::cout << "-[no]refine" << std::endl; std::cout << " Enables (default) or disables the refinement display mode." << std::endl; std::cout << std::endl; std::cout << "-regression" << std::endl; std::cout << " Runs a stress test of the system." << std::endl; std::cout << std::endl; std::cout << "-version" << std::endl; std::cout << " Prints version number." << std::endl; std::cout << std::endl; std::cout << "-h, -?, -help, --help" << std::endl; std::cout << " Prints this help." << std::endl; exit(1); } /* skip unknown command line parameter */ else { std::cerr << "unknown command line parameter: " << tag << " "; while (cin->peek() != "" && cin->peek()[0] != '-') std::cerr << cin->getString() << " "; std::cerr << std::endl; } } }
// Decodes the WebP data in m_data into the single frame buffer. When
// onlySize is true, only the header is parsed to establish dimensions.
// Returns true when the requested stage completed, false when more data is
// needed, and false via setFailed() on error.
bool WEBPImageDecoder::decode(bool onlySize)
{
#if PLATFORM(CHROMIUM)
    TRACE_EVENT("WEBPImageDecoder::decode", this, 0);
#endif
    if (failed())
        return false;

    const uint8_t* dataBytes = reinterpret_cast<const uint8_t*>(m_data->data());
    const size_t dataSize = m_data->size();

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum number of bytes needed before WebPGetInfo can parse the header.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        if (!WebPGetInfo(dataBytes, dataSize, &width, &height))
            return setFailed();
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(!m_frameBufferCache.isEmpty());
    ImageFrame& buffer = m_frameBufferCache[0];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        buffer.setHasAlpha(false); // FIXME: webp does not support alpha yet.
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    if (!m_decoder) {
        // Decode incrementally, straight into the frame's pixel buffer.
        int rowStride = size().width() * sizeof(ImageFrame::PixelData);
        uint8_t* output = reinterpret_cast<uint8_t*>(buffer.getAddr(0, 0));
        int outputSize = size().height() * rowStride;
        m_decoder = WebPINewRGB(outputMode(), output, outputSize, rowStride);
        if (!m_decoder)
            return setFailed();
    }

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        buffer.setStatus(ImageFrame::FrameComplete);
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return true;
    case VP8_STATUS_SUSPENDED:
        // More input needed; keep the decoder alive for the next call.
        return false;
    default:
        WebPIDelete(m_decoder);
        m_decoder = 0;
        return setFailed();
    }
}