TEST(ImageDecoderTest, requiredPreviousFrameIndexDisposeOverwriteBgcolor) {
    std::unique_ptr<TestImageDecoder> decoder(wrapUnique(new TestImageDecoder()));
    decoder->initFrames(3);
    Vector<ImageFrame, 1>& buffers = decoder->frameBufferCache();

    // A DisposeOverwriteBgcolor frame that covers the whole image resets the
    // canvas, so the following frame starts from scratch and needs no
    // predecessor.
    buffers[1].setDisposalMethod(ImageFrame::DisposeOverwriteBgcolor);
    decoder->resetRequiredPreviousFrames();
    EXPECT_EQ(kNotFound, buffers[2].requiredPreviousFrameIndex());

    // When the disposed frame covers only part of the image, the next frame
    // still depends on it.
    buffers[1].setOriginalFrameRect(IntRect(50, 50, 50, 50));
    decoder->resetRequiredPreviousFrames();
    EXPECT_EQ(1u, buffers[2].requiredPreviousFrameIndex());
}
void Selector::UpdateViewRay() { // TODO: Maybe should be placed in frustum, or somehere else, because it depends greatly // on the projection matrix Math::PerspectiveDecoder decoder(m_projection_matrix); float x = decoder.GetWidthToHeightAspectRatio()*( -1 + 2.0f * m_cursor.X() / m_screen_size.X()); float y = (-1 + 2.0f * m_cursor.Y() / m_screen_size.Y()); auto m = m_view_matrix.Inversed(); Math::vec3 origin = m * Math::vec3(0,0,0); auto vv = m * Math::vec4(x, y, -1.0f / tan(decoder.GetFovY() / 2.0f), 1); Math::vec3 destination(vv[0] / vv[3], vv[1] / vv[3], vv[2] / vv[3]); m_world_screen_point = destination; m_view_ray.SetOriginDestination(origin, origin + (destination - origin).Normalized() * m_selection_depth); }
std::shared_ptr<Video::MovieAudioDecoder> MovieAudioFactory::createDecoder(Video::VideoState* videoState) {
    // Build the decoder first so its output format is known before playback.
    auto decoder = std::make_shared<MWSound::MovieAudioDecoder>(videoState);
    decoder->setupFormat();

    MWBase::SoundManager* soundManager = MWBase::Environment::get().getSoundManager();
    MWBase::SoundStream* stream = soundManager->playTrack(decoder->mDecoderBridge, MWSound::Type::Movie);
    if (!stream) {
        // Playback could not start; hand back an empty pointer.
        return nullptr;
    }

    decoder->mAudioTrack = stream;
    return decoder;
}
TEST(ImageDecoderTest, clearCacheExceptFrameAll) {
    const size_t numFrames = 10;
    std::unique_ptr<TestImageDecoder> decoder(wrapUnique(new TestImageDecoder()));
    decoder->initFrames(numFrames);
    Vector<ImageFrame, 1>& frameBuffers = decoder->frameBufferCache();

    // Alternate partial/complete statuses across the cache.
    for (size_t frame = 0; frame < numFrames; ++frame) {
        frameBuffers[frame].setStatus(frame % 2 ? ImageFrame::FramePartial
                                                : ImageFrame::FrameComplete);
    }

    // Asking to keep kNotFound means "keep nothing": every frame is cleared.
    decoder->clearCacheExceptFrame(kNotFound);

    for (size_t frame = 0; frame < numFrames; ++frame) {
        SCOPED_TRACE(testing::Message() << frame);
        EXPECT_EQ(ImageFrame::FrameEmpty, frameBuffers[frame].getStatus());
    }
}
bool FFMpegStream::readFrame(AVFrame *frame) {
    // Select the libav decode entry point that matches this stream's type.
    typedef int (*decoder_t)(AVCodecContext*, AVFrame*, int*, const AVPacket*);
    decoder_t decode;
    if (type == TYPE_VIDEO)
        decode = &avcodec_decode_video2;
    else
        decode = &avcodec_decode_audio4;

    // Keep feeding packets until the codec emits a complete frame; fail on
    // packet exhaustion or a decode error.
    int got_frame = 0;
    do {
        if (!readPacket())
            return false;
        if (decode(codecContext, frame, &got_frame, &packet) < 0)
            return false;
    } while (!got_frame);

    return true;
}
MediaDataDecoder*
WMFDecoderModule::CreateAACDecoder(uint32_t aChannelCount,
                                   uint32_t aSampleRate,
                                   uint16_t aBitsPerSample,
                                   const uint8_t* aUserData,
                                   uint32_t aUserDataLength) {
  // Construct the WMF-backed audio decoder; ownership passes to the caller
  // only if initialization succeeds, otherwise the nsAutoPtr cleans it up.
  nsAutoPtr<WMFAudioDecoder> audioDecoder(new WMFAudioDecoder());
  nsresult rv = audioDecoder->Init(aChannelCount, aSampleRate, aBitsPerSample,
                                   aUserData, aUserDataLength);
  NS_ENSURE_SUCCESS(rv, nullptr);
  return audioDecoder.forget();
}
// Compares the decoded payloads of two mono-machine sysex dumps.
// Both buffers are expected to hold a 0x1978-byte message whose encoded
// payload starts at offset 10. Returns true when the decoded payloads are
// identical; prints the first mismatching offset and returns false otherwise.
bool compareMonoSysex(uint8_t *buf, uint8_t *buf2) {
    // Decoded payload scratch space; decoding targets index 1 onward, so
    // index 0 is intentionally unused.
    uint8_t buftmp[8192];
    uint8_t buftmp2[8192];
    uint16_t len = 0x1978;

    MNMSysexDecoder decoder(DATA_ENCODER_INIT(buf + 10, len - 10));
    decoder.get(buftmp + 1, len - 10);

    // BUG FIX: the second decoder was initialized with a length of "- 10"
    // (the "len" term was missing), so it read from a bogus window. Both
    // decoders must cover the same len - 10 byte span.
    MNMSysexDecoder decoder2(DATA_ENCODER_INIT(buf2 + 10, len - 10));
    decoder2.get(buftmp2 + 1, len - 10);

    for (uint16_t i = 1; i < len - 10; i++) {
        if (buftmp[i] != buftmp2[i]) {
            printf("mono sysex different at 0x%x, %x != %x\n", i, buftmp[i], buftmp2[i]);
            return false;
        }
    }
    return true;
}
// Decompress an arithmetic-coded stream with an adaptive model, writing the
// decoded bytes to `out` until the terminator symbol is seen.
void decompressAdaptive(std::istream& in, std::ostream& out) {
    AdaptiveDataModel dataModel(NUM_SYMBOLS);
    ArithmeticDecoder decoder(std::make_shared<BitStreamReader>(&in));

    while (true) {
        const auto symbol = decoder.decode(&dataModel);
        if (symbol >= NUM_SYMBOLS) {
            throw std::runtime_error("Read bad symbol value. Symbols are expected to be 1 byte long.");
        }
        // The highest symbol is reserved as the end-of-stream marker.
        if (symbol == NUM_SYMBOLS - 1) {
            break;
        }
        out << static_cast<char>(symbol);
    }
}
/* Decode every body in `bodyv` with `decoder`, concatenating the decoded
 * items into *result. Stops at the first decoding failure and returns the
 * corresponding error (NULL on full success). Items decoded before the
 * failure are still stored in *result. */
GError *
metautils_unpack_bodyv (GByteArray **bodyv, GSList **result, body_decoder_f decoder)
{
	GError *err = NULL;
	GSList *items = NULL;
	GByteArray **cursor;

	for (cursor = bodyv; *cursor && !err; ++cursor) {
		GSList *decoded = NULL;
		if (decoder (&decoded, (*cursor)->data, (*cursor)->len, NULL))
			items = metautils_gslist_precat (items, decoded);
		else
			err = NEWERROR (CODE_PROXY_ERROR, "Bad payload from service");
	}

	*result = items;
	return err;
}
// Read a fixed-length, possibly NUL-padded string field out of a raw buffer
// and decode it to a QString with `codec`. If the field extends past the end
// of the buffer, the missing tail is treated as zero bytes.
// Throws std::runtime_error when the codec is null or a decoder cannot be made.
QString readFixedLengthStringFromBuffer(const void *buffer, size_t bufferSize, size_t pos, uint length, QTextCodec *codec)
{
    if (codec == NULL) {
        throw std::runtime_error("Codec is NULL");
    }

    const size_t fieldLen = static_cast<size_t>(length);
    QScopedArrayPointer<quint8> raw(new quint8[fieldLen]);
    const quint8 *src = reinterpret_cast<const quint8 *>(buffer);

    // Copy as much of the field as the buffer actually holds...
    const size_t end = (pos + fieldLen <= bufferSize) ? (pos + fieldLen) : bufferSize;
    size_t written = 0;
    for (size_t i = pos; i < end; ++i) {
        raw[written] = src[i];
        ++written;
    }
    // ...and zero-fill the remainder when the field runs past the buffer.
    for (size_t i = written; i < fieldLen; ++i) {
        raw[i] = 0;
    }

    // Decode only up to the first NUL (or the full field if none).
    uint stringLength = qstrnlen(reinterpret_cast<char *>(raw.data()), length);

    QScopedPointer<QTextDecoder> decoder(codec->makeDecoder());
    if (decoder.isNull()) {
        throw std::runtime_error("Unable to create text decoder");
    }

    return decoder->toUnicode(reinterpret_cast<const char *>(raw.data()),
                              static_cast<int>(stringLength));
}
// Pull the FEC-protected header bits out of the bit buffer, decode them, and
// log the result. Returns the decoded header, or NULL when decoding fails.
CHeaderData* CSoundCardRepeaterTXRXThread::processFECHeader()
{
	bool bits[FEC_SECTION_LENGTH_BITS];
	m_bitBuffer.getData(bits, FEC_SECTION_LENGTH_BITS);

	CRadioHeaderDecoder decoder(bits, FEC_SECTION_LENGTH_BITS);

	CHeaderData* header = decoder.getHeaderData();
	if (header == NULL)
		return NULL;

	// Remember the bit error rate of this decode for later reporting.
	m_headerBER = decoder.getBER();

	wxLogMessage(wxT("Radio header decoded - My: %s/%s Your: %s Rpt1: %s Rpt2: %s Flags: %02X %02X %02X BER: %u%%"),
		header->getMyCall1().c_str(), header->getMyCall2().c_str(),
		header->getYourCall().c_str(),
		header->getRptCall1().c_str(), header->getRptCall2().c_str(),
		header->getFlag1(), header->getFlag2(), header->getFlag3(),
		m_headerBER);

	return header;
}
// Decode an Ogg file and upload its PCM data into a new OpenAL buffer on the
// given context. Returns the OpenAL buffer name.
unsigned int LoadSound(const char * filename, ALCcontext * context)
{
    alcMakeContextCurrent(context);

    // Decode the whole file into the decoder's internal buffer.
    OggDecoder decoder(filename);
    decoder.GetInfo();
    decoder.FillBuffer();

    unsigned int buffer;
    alGenBuffers(1, &buffer);

    const int format = (decoder.GetChannels() > 1) ? AL_FORMAT_STEREO16
                                                   : AL_FORMAT_MONO16;
    alBufferData(buffer, format, decoder.GetData(), decoder.GetBufferSize(),
                 decoder.GetSampleRate());

    // The PCM copy now lives in the OpenAL buffer; free the decoder's copy.
    decoder.ClearBuffer();
    return buffer;
}
// Decode and score every individual in the population.
// For maximization problems the fitness is the objective value itself; for
// minimization it is negated so that higher fitness is always better.
// FIX: removed the unused locals `x` and `y`.
void evaluate_fitness(Population * population, mySlimTree* SlimTree, TCity * queryObject, stDistance range)
{
    long i;

    for (i = 0; i < POPULATION_SIZE; ++i) {
        // Translate the genotype into a phenotype before scoring.
        decoder(&population->individuals[i]);

        double score = objective_function(population->individuals[i].phenotype,
                                          SlimTree, queryObject, range);
        if (MAXIMIZATION) {
            population->individuals[i].fitness = score;
        } else {
            population->individuals[i].fitness = -1 * score;
        }
    }
}
TEST(invalidDecoder, CodecLibrary)
{
    HBitmapDecoder decoder(new WindowsBitmapDecoder);

    // createIterator() must refuse to work on a decoder that was constructed
    // without input data, signalling this via InvalidDecoder.
    bool threwInvalidDecoder = false;
    try
    {
        decoder->createIterator();
    }
    catch (const InvalidDecoder&)
    {
        threwInvalidDecoder = true;
    }

    CHECK(threwInvalidDecoder);
}
// Wrap an EME decoder in a proxy that runs it on the GMP service thread.
// Returns nullptr when the GMP service or its thread is unavailable.
static already_AddRefed<MediaDataDecoderProxy>
CreateDecoderWrapper(MediaDataDecoderCallback* aCallback, CDMProxy* aProxy, FlushableTaskQueue* aTaskQueue)
{
  nsCOMPtr<mozIGeckoMediaPluginService> gmpService =
    do_GetService("@mozilla.org/gecko-media-plugin-service;1");
  if (!gmpService) {
    return nullptr;
  }

  nsCOMPtr<nsIThread> gmpThread;
  if (NS_FAILED(gmpService->GetThread(getter_AddRefs(gmpThread)))) {
    return nullptr;
  }

  RefPtr<MediaDataDecoderProxy> proxy(
    new EMEMediaDataDecoderProxy(gmpThread, aCallback, aProxy, aTaskQueue));
  return proxy.forget();
}
/* Transcode the whole sample set once and report the average instruction-
 * cycle cost per sample, measured via the simulator's __time counter.
 * FIX: removed the unused local `U16BIT I`. */
int main(void)
{
	int i, sic, eic;

	reset_encoder();
	reset_decoder();

	/* __time counts half-cycles, hence the division by 2. */
	sic = __time / 2;
	for (i = 0; i < sizeof(Input) / sizeof(U16BIT); i++)
		decoder(encoder(Input[i]));
	eic = __time / 2;

	printf("\nInstruction cycles for transcoding a sample: %d\n",
	       (eic - sic) / (sizeof(Input) / sizeof(U16BIT)));
	return (0);
}
// Extracts the pixels of |image| into |outputVector| in the requested GL
// format/type, applying whatever alpha (un)premultiplication is needed.
// Returns false if the image cannot be decoded into 32-bit ARGB pixels.
bool GraphicsContext3D::getImageData(Image* image, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool ignoreGammaAndColorProfile, Vector<uint8_t>& outputVector)
{
    if (!image)
        return false;
    // Keeps a freshly decoded frame alive until packPixels() has read it.
    OwnPtr<NativeImageSkia> pixels;
    NativeImageSkia* skiaImage = 0;
    AlphaOp neededAlphaOp = AlphaDoNothing;
    // Only bitmap images report per-frame alpha; conservatively assume alpha
    // for every other image type.
    bool hasAlpha = image->isBitmapImage() ? static_cast<BitmapImage*>(image)->frameHasAlphaAtIndex(0) : true;
    if ((ignoreGammaAndColorProfile || (hasAlpha && !premultiplyAlpha)) && image->data()) {
        // Re-decode from the encoded bytes so we can get unpremultiplied
        // and/or gamma-unaffected data that the cached frame cannot provide.
        ImageSource decoder(ImageSource::AlphaNotPremultiplied, ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied);
        // Attempt to get raw unpremultiplied image data
        decoder.setData(image->data(), true);
        if (!decoder.frameCount() || !decoder.frameIsCompleteAtIndex(0))
            return false;
        hasAlpha = decoder.frameHasAlphaAtIndex(0);
        pixels = adoptPtr(decoder.createFrameAtIndex(0));
        if (!pixels.get() || !pixels->isDataComplete() || !pixels->width() || !pixels->height())
            return false;
        // packPixels() below only understands 32-bit ARGB input.
        SkBitmap::Config skiaConfig = pixels->config();
        if (skiaConfig != SkBitmap::kARGB_8888_Config)
            return false;
        skiaImage = pixels.get();
        if (hasAlpha && premultiplyAlpha)
            neededAlphaOp = AlphaDoPremultiply;
    } else {
        // Use the already-decoded native frame, undoing premultiplication
        // when the caller asked for straight alpha.
        skiaImage = image->nativeImageForCurrentFrame();
        if (!premultiplyAlpha && hasAlpha)
            neededAlphaOp = AlphaDoUnmultiply;
    }
    if (!skiaImage)
        return false;
    SkBitmap& skiaImageRef = *skiaImage;
    SkAutoLockPixels lock(skiaImageRef);
    // packPixels() assumes tightly packed 32-bit rows (no row padding).
    ASSERT(skiaImage->rowBytes() == skiaImage->width() * 4);
    outputVector.resize(skiaImage->rowBytes() * skiaImage->height());
    return packPixels(reinterpret_cast<const uint8_t*>(skiaImage->getPixels()), SourceFormatBGRA8, skiaImage->width(), skiaImage->height(), 0, format, type, neededAlphaOp, outputVector.data());
}
// Invoke the externally registered image decoder on a raw buffer.
// On success the decoder's output is sanity-checked and premultiplied; on
// any failure the result is mapped to TP_IMAGE_UNSUPPORTED_FORMAT so the
// caller falls back to the internal decoders.
virtual int decode( gpointer data, gsize size, TPImage * image )
{
    if ( ! enabled() )
    {
        tplog( " EXTERNAL IMAGE DECODER IS DISABLED" );
        return TP_IMAGE_UNSUPPORTED_FORMAT;
    }

    tplog( " INVOKING EXTERNAL DECODER WITH BUFFER OF %d BYTES", size );

    int result = decoder( data, size, image, decoder_data );

    tplog( " EXTERNAL DECODER RETURNED %d", result );

    if ( result == TP_IMAGE_DECODE_OK )
    {
        tplog( " pixels : %p", image->pixels );
        tplog( " width : %u", image->width );
        tplog( " height : %u", image->height );
        tplog( " pitch : %u", image->pitch );
        tplog( " depth : %u", image->depth );
        tplog( " bgr : %u", image->bgr );
        tplog( " pm_alpha : %u", image->pm_alpha );
        tplog( " free_image : %p", image->free_image );

        // Enforce the contract an external decoder must honor: pixels
        // present, sane pitch, 24/32-bit depth, RGB (not BGR) order.
        g_assert( image->pixels != NULL );
        g_assert( image->pitch >= image->width * image->depth );
        g_assert( image->depth == 3 || image->depth == 4 );
        g_assert( image->bgr == 0 );

        Image::premultiply_alpha( image );
    }
    else
    {
        // A failed decode must not leave pixels behind.
        g_assert( image->pixels == NULL );

        // We change to unsupported format so that no matter what
        // the external decoder does, we try the internal decoders.
        result = TP_IMAGE_UNSUPPORTED_FORMAT;
    }

    return result;
}
// Extracts the pixels of |image| into |outputVector| in the requested GL
// format/type via a cairo surface, applying alpha unmultiplication when the
// caller wants straight alpha. Returns false if no ARGB32 surface of
// non-zero size can be obtained.
bool GraphicsContext3D::getImageData(Image* image, unsigned int format, unsigned int type, bool premultiplyAlpha, bool ignoreGammaAndColorProfile, Vector<uint8_t>& outputVector)
{
    if (!image)
        return false;
    // We need this to stay in scope because the native image is just a shallow copy of the data.
    ImageSource decoder(premultiplyAlpha ? ImageSource::AlphaPremultiplied : ImageSource::AlphaNotPremultiplied, ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied);
    AlphaOp alphaOp = AlphaDoNothing;
    RefPtr<cairo_surface_t> imageSurface;
    if (image->data()) {
        // Decode directly from the encoded bytes with the requested alpha mode.
        decoder.setData(image->data(), true);
        if (!decoder.frameCount() || !decoder.frameIsCompleteAtIndex(0))
            return false;
        imageSurface = decoder.createFrameAtIndex(0);
    } else {
        // No encoded data available: fall back to the already-decoded native
        // surface and undo premultiplication later if needed.
        imageSurface = image->nativeImageForCurrentFrame();
        if (!premultiplyAlpha)
            alphaOp = AlphaDoUnmultiply;
    }

    if (!imageSurface)
        return false;

    int width = cairo_image_surface_get_width(imageSurface.get());
    int height = cairo_image_surface_get_height(imageSurface.get());
    if (!width || !height)
        return false;

    // Only 32-bit ARGB surfaces can be repacked below.
    if (cairo_image_surface_get_format(imageSurface.get()) != CAIRO_FORMAT_ARGB32)
        return false;

    // Derive the smallest unpack alignment consistent with the surface's row
    // stride, which may include padding beyond width * 4 bytes.
    unsigned int srcUnpackAlignment = 1;
    size_t bytesPerRow = cairo_image_surface_get_stride(imageSurface.get());
    size_t bitsPerPixel = 32;
    unsigned int padding = bytesPerRow - bitsPerPixel / 8 * width;
    if (padding) {
        srcUnpackAlignment = padding + 1;
        while (bytesPerRow % srcUnpackAlignment)
            ++srcUnpackAlignment;
    }

    outputVector.resize(width * height * 4);

    return packPixels(cairo_image_surface_get_data(imageSurface.get()), SourceFormatBGRA8, width, height, srcUnpackAlignment, format, type, alphaOp, outputVector.data());
}
// Create the Media Foundation transform for this audio stream and configure
// its input media type from the stream parameters, with PCM output.
// Returns nullptr when the stream type is unknown or any MF call fails.
TemporaryRef<MFTDecoder>
WMFAudioMFTManager::Init()
{
  NS_ENSURE_TRUE(mStreamType != Unknown, nullptr);

  RefPtr<MFTDecoder> decoder(new MFTDecoder());

  HRESULT hr = decoder->Create(GetMFTGUID());
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  // Setup input/output media types
  RefPtr<IMFMediaType> type;

  hr = wmf::MFCreateMediaType(byRef(type));
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  hr = type->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  hr = type->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, mAudioRate);
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  hr = type->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, mAudioChannels);
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  if (mStreamType == AAC) {
    hr = type->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0); // Raw AAC packet
    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

    // Pass the container's codec-specific configuration blob to the MFT.
    hr = type->SetBlob(MF_MT_USER_DATA,
                       mUserData.Elements(),
                       mUserData.Length());
    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
  }

  hr = decoder->SetMediaTypes(type, MFAudioFormat_PCM);
  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);

  mDecoder = decoder;

  return decoder.forget();
}
// Read metadata and compute an acoustic fingerprint for m_path, reporting
// either the filled-in result or the first error through finished().
// Ownership of the AnalyzeResult passes to the finished() receiver.
void AnalyzeFileTask::run()
{
    qDebug() << "Analyzing file" << m_path;

    AnalyzeResult *result = new AnalyzeResult();
    result->fileName = m_path;

    TagReader tags(m_path);
    if (!tags.read()) {
        result->error = true;
        result->errorMessage = "Couldn't read metadata";
        emit finished(result);
        return;
    }

    result->mbid = tags.mbid();
    result->length = tags.length();
    result->bitrate = tags.bitrate();

    // Fingerprinting needs a minimum amount of audio to be meaningful.
    if (result->length < 10) {
        result->error = true;
        result->errorMessage = "Too short audio stream, should be at least 10 seconds";
        emit finished(result);
        return;
    }

    Decoder decoder(qPrintable(m_path));
    if (!decoder.Open()) {
        result->error = true;
        result->errorMessage = QString("Couldn't open the file: ") + QString::fromStdString(decoder.LastError());
        emit finished(result);
        return;
    }

    FingerprintCalculator fpcalculator;
    if (!fpcalculator.start(decoder.SampleRate(), decoder.Channels())) {
        result->error = true;
        result->errorMessage = "Error while fingerpriting the file";
        emit finished(result);
        return;
    }

    // Feed up to AUDIO_LENGTH of decoded audio into the calculator.
    decoder.Decode(&fpcalculator, AUDIO_LENGTH);
    result->fingerprint = fpcalculator.finish();

    emit finished(result);
}
// Create an AAC decoder through the preferred platform decoder module.
// Returns nullptr when no module is available or decoder creation fails.
static already_AddRefed<MediaDataDecoder>
CreateTestAACDecoder(AudioInfo& aConfig)
{
  PlatformDecoderModule::Init();

  nsRefPtr<PlatformDecoderModule> pdm = PlatformDecoderModule::Create();
  if (!pdm) {
    return nullptr;
  }

  nsRefPtr<MediaDataDecoder> aacDecoder(
    pdm->CreateDecoder(aConfig, nullptr, nullptr));
  if (!aacDecoder) {
    return nullptr;
  }

  return aacDecoder.forget();
}
// Create an AAC decoder through the preferred platform decoder module,
// first verifying that the module supports the AAC MIME type.
// Returns nullptr when unsupported or when decoder creation fails.
static already_AddRefed<MediaDataDecoder>
CreateTestAACDecoder(AudioInfo& aConfig)
{
  PlatformDecoderModule::Init();

  nsRefPtr<PlatformDecoderModule> pdm = PlatformDecoderModule::Create();
  if (!pdm) {
    return nullptr;
  }
  if (!pdm->SupportsMimeType(NS_LITERAL_CSTRING("audio/mp4a-latm"))) {
    return nullptr;
  }

  nsRefPtr<MediaDataDecoder> aacDecoder(
    pdm->CreateDecoder(aConfig, nullptr, nullptr));
  if (!aacDecoder) {
    return nullptr;
  }

  return aacDecoder.forget();
}
void decodeCurrFile() { if (fCurrFile.size() == 0) { fDecodeSucceeded = false; return; } SkFILEStream stream(fCurrFile.c_str()); SkAutoTDelete<SkImageDecoder> decoder(SkImageDecoder::Factory(&stream)); if (nullptr == decoder.get()) { fDecodeSucceeded = false; return; } if (!fPremul) { decoder->setRequireUnpremultipliedColors(true); } fDecodeSucceeded = decoder->decode(&stream, &fBitmap, kN32_SkColorType, SkImageDecoder::kDecodePixels_Mode) != SkImageDecoder::kFailure; this->inval(nullptr); }
/**
 * Fetch the CrawlData stored under `crawlId`.
 * @throw std::exception (std::runtime_error) when no record exists for the id.
 * FIX: removed the unreachable `return retrieved;` that followed the
 * if/else in which both branches already exit.
 */
const CrawlData LevCrawlDb::getData(const CrawlId& crawlId) {
    const string crawlId_str = std::to_string(crawlId);

    string retrieved_str;
    auto status = mDatabase->Get(leveldb::ReadOptions(), crawlId_str, &retrieved_str);
    if (!status.ok()) {
        // No entry stored under this id.
        throw std::runtime_error("No data corresponding to crawlId = " + crawlId_str);
    }

    // Deserialize the raw record into a CrawlData parcel.
    CrawlData retrieved;
    ParcelDecoder decoder(retrieved_str.data(), retrieved_str.size());
    decoder.decode(&retrieved);
    return retrieved;
}
// Print the encoding's header fields and then every decoded value,
// sixteen per line.
void CIIRegionEncoding::dump() const {
    std::cout << "Domain size: " << this->domain_size << std::endl;
    std::cout << "Inverted flag? " << (this->is_inverted ? 'Y' : 'N') << std::endl;

    int column = 0;
    ProgressiveCIIDecoder decoder(*this);
    while (decoder.has_top()) {
        std::cout << decoder.top() << " ";
        // Break the line after every 16th value.
        column = (column + 1) % 16;
        if (column == 0) {
            std::cout << std::endl;
        }
        decoder.next();
    }

    // Terminate a trailing partial line.
    if (column != 0) {
        std::cout << std::endl;
    }
}
// Decode an encoded image (e.g. a multi-resolution .ico) and return the
// frame whose dimensions best match |desiredSize|. Returns a null WebImage
// when the data cannot be decoded or the chosen frame fails to decode.
WebImage WebImage::fromData(const WebData& data, const WebSize& desiredSize)
{
    RefPtr<SharedBuffer> buffer = PassRefPtr<SharedBuffer>(data);
    OwnPtr<ImageDecoder> decoder(ImageDecoder::create(*buffer.get(), ImageSource::AlphaPremultiplied, ImageSource::GammaAndColorProfileIgnored));
    if (!decoder)
        return WebImage();

    decoder->setData(buffer.get(), true);
    if (!decoder->isSizeAvailable())
        return WebImage();

    // Frames are arranged by decreasing size, then decreasing bit depth.
    // Pick the frame closest to |desiredSize|'s area without being smaller,
    // which has the highest bit depth.
    const size_t frameCount = decoder->frameCount();
    size_t index = 0; // Default to first frame if none are large enough.
    int frameAreaAtIndex = 0;
    for (size_t i = 0; i < frameCount; ++i) {
        const IntSize frameSize = decoder->frameSizeAtIndex(i);
        if (WebSize(frameSize) == desiredSize) {
            index = i;
            break; // Perfect match.
        }

        const int frameArea = frameSize.width() * frameSize.height();
        if (frameArea < (desiredSize.width * desiredSize.height))
            break; // No more frames that are large enough.

        if (!i || (frameArea < frameAreaAtIndex)) {
            index = i; // Closer to desired area than previous best match.
            frameAreaAtIndex = frameArea;
        }
    }

    // Decode the selected frame and wrap its native bitmap.
    ImageFrame* frame = decoder->frameBufferAtIndex(index);
    if (!frame)
        return WebImage();

    RefPtr<NativeImageSkia> image = frame->asNewNativeImage();
    if (!image)
        return WebImage();

    return WebImage(image->bitmap());
}
// Parse a MachineDrum global-settings sysex dump (sysex framing already
// stripped). Returns false on unexpected length or bad checksum; on success
// fills in the routing, key map, MIDI and sync fields of this object.
bool MDGlobal::fromSysex(uint8_t *data, uint16_t len) {
  if (len != 0xC4 - 6) {
    // printf("wrong length\n");
    // wrong length
    return false;
  }

  if (!ElektronHelper::checkSysexChecksum(data, len)) {
    // printf("wrong checksum\n");
    return false;
  }

  origPosition = data[3];
  ElektronSysexDecoder decoder(DATA_ENCODER_INIT(data + 4, len - 4));
  // The dump mixes plain bytes with 7-bit-packed sections; stop7Bit()/
  // start7Bit() toggle the decoder between the two.
  decoder.stop7Bit();
  decoder.get(drumRouting, 16);
  decoder.start7Bit();
  decoder.get(keyMap, 128);
  decoder.stop7Bit();
  decoder.get8(&baseChannel);
  decoder.get8(&unused);
  decoder.get16(&tempo);
  decoder.getb(&extendedMode);

  // MIDI clock/transport flags are packed into individual bits of one byte.
  uint8_t byte = 0;
  decoder.get8(&byte);
  clockIn = IS_BIT_SET(byte, 0);
  transportIn = IS_BIT_SET(byte, 4);
  clockOut = IS_BIT_SET(byte, 5);
  transportOut = IS_BIT_SET(byte, 6);
  decoder.getb(&localOn);
  decoder.get(&drumLeft, 12);

  // Build the reverse mapping: drum index (0..15) -> MIDI note number.
  for (int i = 0; i < 128; i++) {
    if (keyMap[i] < 16) {
      drumMapping[keyMap[i]] = i;
    }
  }

  return true;
}
// Rebuild the pending queue list from its binary serialization, appending
// each restored queue (with a fresh pendingQueueID) to _queues under the
// queues mutex. Errors are logged and swallowed; the mutex is always released.
void PendingBidCoSQueues::unserialize(std::shared_ptr<std::vector<char>> serializedData, BidCoSPeer* peer, HomeMaticDevice* device)
{
    try
    {
        BaseLib::BinaryDecoder decoder(GD::bl);
        uint32_t position = 0;
        _queuesMutex.lock();
        uint32_t pendingQueuesSize = decoder.decodeInteger(*serializedData, position);
        for(uint32_t i = 0; i < pendingQueuesSize; i++)
        {
            // Each queue is length-prefixed so we can advance to the next
            // entry independently of how much the queue itself consumes.
            uint32_t queueLength = decoder.decodeInteger(*serializedData, position);
            std::shared_ptr<BidCoSQueue> queue(new BidCoSQueue());
            queue->unserialize(serializedData, device, position);
            position += queueLength;
            queue->noSending = true;
            bool hasCallbackFunction = decoder.decodeBoolean(*serializedData, position);
            if(hasCallbackFunction)
            {
                // Restore the queue-empty callback together with its saved
                // parameters (two ints, one string, two more ints).
                std::shared_ptr<CallbackFunctionParameter> parameters(new CallbackFunctionParameter());
                parameters->integers.push_back(decoder.decodeInteger(*serializedData, position));
                parameters->strings.push_back(decoder.decodeString(*serializedData, position));
                parameters->integers.push_back(decoder.decodeInteger(*serializedData, position));
                // NOTE(review): the last value is scaled by 1000 here —
                // presumably a unit conversion; confirm against the
                // serialization side.
                parameters->integers.push_back(decoder.decodeInteger(*serializedData, position) * 1000);
                queue->callbackParameter = parameters;
                queue->queueEmptyCallback = delegate<void (std::shared_ptr<CallbackFunctionParameter>)>::from_method<BidCoSPeer, &BidCoSPeer::addVariableToResetCallback>(peer);
            }
            queue->pendingQueueID = _currentID++;
            _queues.push_back(queue);
        }
    }
    catch(const std::exception& ex)
    {
        GD::out.printEx(__FILE__, __LINE__, __PRETTY_FUNCTION__, ex.what());
    }
    catch(BaseLib::Exception& ex)
    {
        GD::out.printEx(__FILE__, __LINE__, __PRETTY_FUNCTION__, ex.what());
    }
    catch(...)
    {
        GD::out.printEx(__FILE__, __LINE__, __PRETTY_FUNCTION__);
    }
    // Unlock outside the try block so the mutex is released even on error.
    _queuesMutex.unlock();
}
// Extracts the pixels of |image| into |outputVector| in the requested GL
// format/type, applying the needed alpha (un)premultiplication. Returns
// false when no complete 32-bit ARGB frame can be obtained.
bool GraphicsContext3D::getImageData(Image* image, unsigned int format, unsigned int type, bool premultiplyAlpha, bool ignoreGammaAndColorProfile, Vector<uint8_t>& outputVector)
{
    if (!image)
        return false;
    // Keeps a freshly decoded frame alive until packPixels() has read it.
    OwnPtr<NativeImageSkia> pixels;
    NativeImageSkia* skiaImage = 0;
    AlphaOp neededAlphaOp = AlphaDoNothing;
    if (image->data()) {
        // Re-decode from the encoded bytes so the frame's alpha state is
        // known (decoded here as not premultiplied).
        ImageSource decoder(ImageSource::AlphaNotPremultiplied, ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied);
        decoder.setData(image->data(), true);
        if (!decoder.frameCount() || !decoder.frameIsCompleteAtIndex(0))
            return false;
        bool hasAlpha = decoder.frameHasAlphaAtIndex(0);
        pixels = adoptPtr(decoder.createFrameAtIndex(0));
        if (!pixels.get() || !pixels->isDataComplete() || !pixels->width() || !pixels->height())
            return false;
        // packPixels() below only understands 32-bit ARGB input.
        SkBitmap::Config skiaConfig = pixels->config();
        if (skiaConfig != SkBitmap::kARGB_8888_Config)
            return false;
        skiaImage = pixels.get();
        if (hasAlpha && premultiplyAlpha)
            neededAlphaOp = AlphaDoPremultiply;
    } else {
        // This is a special case for texImage2D with HTMLCanvasElement input.
        skiaImage = image->nativeImageForCurrentFrame();
        if (!premultiplyAlpha)
            neededAlphaOp = AlphaDoUnmultiply;
    }
    if (!skiaImage)
        return false;
    SkBitmap& skiaImageRef = *skiaImage;
    SkAutoLockPixels lock(skiaImageRef);
    // packPixels() assumes tightly packed 32-bit rows (no row padding).
    ASSERT(skiaImage->rowBytes() == skiaImage->width() * 4);
    outputVector.resize(skiaImage->rowBytes() * skiaImage->height());
    return packPixels(reinterpret_cast<const uint8_t*>(skiaImage->getPixels()), SourceFormatBGRA8, skiaImage->width(), skiaImage->height(), 0, format, type, neededAlphaOp, outputVector.data());
}