Example #1
void QQnxRasterWindow::resetBuffers()
{
    // Buffers were destroyed; reacquire them
    m_currentBufferIndex = -1;
    m_previousDirty = QRegion();
    m_scrolled = QRegion();
    if (window()->parent() && bufferSize() == QSize(1,1)) {
        // If we have a parent then we're not really rendering.  But if we don't render we'll
        // be invisible and any children won't show up.  This should be harmless since we're
        // rendering into a 1x1 window that has transparency set to discard.
        renderBuffer();
        post(QRegion(0,0,1,1));
    }
}
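The method above follows a common pattern after buffer loss: invalidate all cached buffer state, then force one render-and-post so a nominally invisible placeholder window keeps its children composited. A minimal sketch of that idiom, with hypothetical Region and BufferedSurface types standing in for the QNX screen API:

struct Region {
    int x, y, w, h;
};

class BufferedSurface {
public:
    // Called when the platform reports that the backing buffers were destroyed.
    void resetBuffers() {
        m_currentBufferIndex = -1;             // force buffer reacquisition on next render
        m_previousDirty = Region{0, 0, 0, 0};  // nothing from the old buffers is reusable
        if (m_isPlaceholder) {
            render();                          // keep the nominally invisible surface alive
            post(Region{0, 0, 1, 1});          // repost so children remain composited
        }
    }

private:
    void render() { /* draw into the current buffer */ }
    void post(const Region&) { /* hand the dirty region to the compositor */ }

    int m_currentBufferIndex = -1;
    Region m_previousDirty{0, 0, 0, 0};
    bool m_isPlaceholder = true;
};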
Example #2
int PreviewStream::processFrame(CameraFrame *frame)
{
    status_t ret = NO_ERROR;
    StreamBuffer buffer;  // declared before the first goto so the jumps remain legal C++

    if (mShowFps) {
        showFps();
    }

    ret = renderBuffer(frame);
    if (ret != NO_ERROR) {
        FLOGE("%s renderBuffer failed, state %d", __FUNCTION__, frame->getState());
        goto err_exit;
    }
    // Add a reference while the frame is held by the service.
    frame->addReference();

    ret = requestBuffer(&buffer);
    if (ret != NO_ERROR) {
        FLOGE("%s requestBuffer failed", __FUNCTION__);
        goto err_exit;
    }

    for (int i = 0; i < mTotalBuffers; i++) {
        if (mCameraBuffer[i].mBufHandle == buffer.mBufHandle) {
            // Release the service's reference to this frame.
            mCameraBuffer[i].release();
            break;
        }
    }

err_exit:
    sem_post(&mRespondSem);

    return ret;
}
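Every path through processFrame, success or failure, funnels through err_exit so that sem_post always releases the thread waiting on mRespondSem. A minimal sketch of that single-exit signalling pattern, with a hypothetical doWork step and gRespondSem semaphore:

#include <semaphore.h>

static sem_t gRespondSem;      // assume sem_init(&gRespondSem, 0, 0) ran at startup

// Hypothetical work step for the sketch; returns 0 on success.
static int doWork() { return 0; }

static int processOnce()
{
    int ret = doWork();
    if (ret != 0) {
        goto err_exit;         // skip the remaining steps, but still signal below
    }

    // ... further processing; any failure also jumps to err_exit ...

err_exit:
    sem_post(&gRespondSem);    // the waiting thread is released on every path
    return ret;
}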
Example #3
//TODO: Add error handling. (Add a default texture so it is obvious when something went wrong, etc.)
void RenderTarget::initAndBindRenderTargetTextures(unsigned int width, unsigned int height, unsigned int mask)
{
	assert(IsValidMask(mask));

	if((RenderTargetTextureCoponents::Color & mask) == RenderTargetTextureCoponents::Color)
	{
		glGenTextures(1, &m_renderTex2DHandle);
		//glActiveTexture(GL_TEXTURE0);  // Use texture unit 0 
		// Bind the newly created texture: all subsequent texture calls will modify it
		glBindTexture(GL_TEXTURE_2D, m_renderTex2DHandle);
 
		// Hand OpenGL an empty image (the final 0 means no initial pixel data)
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
 
		// Simple linear filtering; required because the default minification filter expects mipmaps
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

		glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, m_renderTex2DHandle, 0);
	}

	if((RenderTargetTextureCoponents::Depth & mask) == RenderTargetTextureCoponents::Depth)
	{
		glGenRenderbuffers(1, &m_depthBufHandle);
		glBindRenderbuffer(GL_RENDERBUFFER, m_depthBufHandle);
		glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, width, height);

		// Bind the depth buffer to the FBO 
		glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, m_depthBufHandle); 
	}

	// Init default texture after
	//initDefaultTexture();

	renderBuffer();
}
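The TODO above asks for error handling; the usual check after attaching color and depth targets is glCheckFramebufferStatus, which reports whether the FBO is complete before anything renders into it. A small sketch, assuming a GL 3.x context is current and using GLEW only as a placeholder for whatever loader the project already uses:

#include <GL/glew.h>  // or whatever loader the project already uses
#include <cstdio>

// Returns true if the currently bound framebuffer is complete and usable.
bool checkFramebuffer()
{
    const GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE) {
        std::fprintf(stderr, "FBO incomplete, status 0x%04x\n", status);
        return false;
    }
    return true;
}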
Example #4
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());

            looper()->registerHandler(mSource);

            CHECK(mDriver != NULL);
            sp<NuPlayerDriver> driver = mDriver.promote();
            if (driver != NULL) {
                driver->notifySetDataSourceCompleted(OK);
            }
            break;
        }

        case kWhatPrepare:
        {
            mSource->prepareAsync();
            break;
        }

        case kWhatGetTrackInfo:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            status_t err = INVALID_OPERATION;
            if (mSource != NULL) {
                Parcel* reply;
                CHECK(msg->findPointer("reply", (void**)&reply));
                err = mSource->getTrackInfo(reply);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            response->postReply(replyID);
            break;
        }

        case kWhatSelectTrack:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            status_t err = INVALID_OPERATION;
            if (mSource != NULL) {
                size_t trackIndex;
                int32_t select;
                CHECK(msg->findSize("trackIndex", &trackIndex));
                CHECK(msg->findInt32("select", &select));
                err = mSource->selectTrack(trackIndex, select);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            response->postReply(replyID);
            break;
        }

        case kWhatPollDuration:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPollDurationGeneration) {
                // stale
                break;
            }

            int64_t durationUs;
            if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifyDuration(durationUs);
                }
            }

            msg->post(1000000ll);  // poll again in a second.
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        false /* audio */, true /* video */));

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mDeferredActions.push_back(
                    new SetSurfaceAction(
                        static_cast<NativeWindowWrapper *>(obj.get())));

            if (obj != NULL) {
                // If there is a new surface texture, instantiate decoders
                // again if possible.
                mDeferredActions.push_back(
                        new SimpleAction(&NuPlayer::performScanSources));
            }

            processDeferredActions();
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;
            mStarted = true;

            mSource->start();

            uint32_t flags = 0;

            if (mSource->isRealTime()) {
                flags |= Renderer::FLAG_REAL_TIME;
            }

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()),
                    flags);

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            bool mHadAnySourcesBefore =
                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);

            if (mNativeWindow != NULL) {
                instantiateDecoder(false, &mVideoDecoder);
            }

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            if (!mHadAnySourcesBefore
                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
                // This is the first time we've found anything playable.

                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
                    schedulePollDuration();
                }
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            if ((mAudioDecoder == NULL && mAudioSink != NULL)
                    || (mVideoDecoder == NULL && mNativeWindow != NULL)) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32(
                                "channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    mAudioSink->close();

                    audio_output_flags_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder
                    // is created after we receive the format change indication.
                    // With the current code we may select the deep-buffer output
                    // even when video is present, which should be harmless since
                    // it does not prevent keeping A/V sync.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs
                                > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    int32_t channelMask;
                    if (!codecRequest->findInt32("channel-mask", &channelMask)) {
                        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
                    }

                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                (audio_channel_mask_t)channelMask,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */,
                                NULL,
                                NULL,
                                flags),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    int32_t displayWidth = cropRight - cropLeft + 1;
                    int32_t displayHeight = cropBottom - cropTop + 1;

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         displayWidth,
                         displayHeight,
                         cropLeft, cropTop);

                    sp<AMessage> videoInputFormat =
                        mSource->getFormat(false /* audio */);

                    // Take into account sample aspect ratio if necessary:
                    int32_t sarWidth, sarHeight;
                    if (videoInputFormat->findInt32("sar-width", &sarWidth)
                            && videoInputFormat->findInt32(
                                "sar-height", &sarHeight)) {
                        ALOGV("Sample aspect ratio %d : %d",
                              sarWidth, sarHeight);

                        displayWidth = (displayWidth * sarWidth) / sarHeight;

                        ALOGV("display dimensions %d x %d",
                              displayWidth, displayHeight);
                    }

                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == ACodec::kWhatDrainThisBuffer) {
                renderBuffer(audio, codecRequest);
            } else if (what != ACodec::kWhatComponentAllocated
                    && what != ACodec::kWhatComponentConfigured
                    && what != ACodec::kWhatBuffersAllocated) {
                ALOGV("Unhandled codec notification %d '%c%c%c%c'.",
                      what,
                      what >> 24,
                      (what >> 16) & 0xff,
                      (what >> 8) & 0xff,
                      what & 0xff);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            } else if (what == Renderer::kWhatVideoRenderingStart) {
                notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
            } else if (what == Renderer::kWhatMediaRenderingStart) {
                ALOGV("media rendering started");
                notifyListener(MEDIA_STARTED, 0, 0);
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        true /* audio */, true /* video */));

            mDeferredActions.push_back(
                    new SimpleAction(&NuPlayer::performReset));

            processDeferredActions();
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs);

            mDeferredActions.push_back(
                    new SimpleAction(&NuPlayer::performDecoderFlush));

            mDeferredActions.push_back(new SeekAction(seekTimeUs));

            processDeferredActions();
            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mSource->pause();
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mSource->resume();
            mRenderer->resume();
            break;
        }

        case kWhatSourceNotify:
        {
            onSourceNotify(msg);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
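Both kWhatScanSources and kWhatPollDuration guard against stale self-posted messages with a generation counter: each (re)schedule bumps the counter, and a handler whose message carries an older value simply drops it. A minimal sketch of the idiom with a hypothetical Poller class, independent of the AMessage machinery:

#include <cstdint>

class Poller {
public:
    void schedulePoll() {
        ++mGeneration;              // invalidate any poll already in flight
        postPoll(mGeneration);      // enqueue a message tagged with the new value
    }

    void cancelPolls() { ++mGeneration; }  // older messages now compare unequal

    void onPoll(int32_t generation) {
        if (generation != mGeneration) {
            return;                 // stale: a newer schedule/cancel superseded it
        }
        doPollWork();
        postPoll(mGeneration);      // re-arm with the same, still-current tag
    }

private:
    void postPoll(int32_t generation) { /* enqueue onPoll(generation) on the looper */ }
    void doPollWork() {}
    int32_t mGeneration = 0;
};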
Example #5
bool GifTranscoder::resizeBoxFilter(GifFileType* gifIn, GifFileType* gifOut) {
    ASSERT(gifIn != NULL, "gifIn cannot be NULL");
    ASSERT(gifOut != NULL, "gifOut cannot be NULL");

    if (gifIn->SWidth < 0 || gifIn->SHeight < 0) {
        LOGE("Input GIF has invalid size: %d x %d", gifIn->SWidth, gifIn->SHeight);
        return false;
    }

    // Output GIF will be 50% the size of the original.
    if (EGifPutScreenDesc(gifOut,
                          gifIn->SWidth / 2,
                          gifIn->SHeight / 2,
                          gifIn->SColorResolution,
                          gifIn->SBackGroundColor,
                          gifIn->SColorMap) == GIF_ERROR) {
        LOGE("Could not write screen descriptor");
        return false;
    }
    LOGD("Wrote screen descriptor");

    // Index of the current image.
    int imageIndex = 0;

    // Transparent color of the current image.
    int transparentColor = NO_TRANSPARENT_COLOR;

    // Buffer for reading raw images from the input GIF.
    std::vector<GifByteType> srcBuffer(gifIn->SWidth * gifIn->SHeight);

    // Buffer for rendering images from the input GIF.
    std::unique_ptr<ColorARGB[]> renderBuffer(new ColorARGB[gifIn->SWidth * gifIn->SHeight]);

    // Buffer for writing new images to output GIF (one row at a time).
    std::unique_ptr<GifByteType[]> dstRowBuffer(new GifByteType[gifOut->SWidth]);

    // Many GIFs use DISPOSE_DO_NOT to make images draw on top of previous images. They can also
    // use DISPOSE_BACKGROUND to clear the last image region before drawing the next one. We need
    // to keep track of the disposal mode as we go along to properly render the GIF.
    int disposalMode = DISPOSAL_UNSPECIFIED;
    int prevImageDisposalMode = DISPOSAL_UNSPECIFIED;
    GifImageDesc prevImageDimens;

    // Background color (applies to entire GIF).
    ColorARGB bgColor = TRANSPARENT;

    GifRecordType recordType;
    do {
        if (DGifGetRecordType(gifIn, &recordType) == GIF_ERROR) {
            LOGE("Could not get record type");
            return false;
        }
        LOGD("Read record type: %d", recordType);
        switch (recordType) {
            case IMAGE_DESC_RECORD_TYPE: {
                if (DGifGetImageDesc(gifIn) == GIF_ERROR) {
                    LOGE("Could not read image descriptor (%d)", imageIndex);
                    return false;
                }

                // Sanity-check the current image position.
                if (gifIn->Image.Left < 0 ||
                        gifIn->Image.Top < 0 ||
                        gifIn->Image.Left + gifIn->Image.Width > gifIn->SWidth ||
                        gifIn->Image.Top + gifIn->Image.Height > gifIn->SHeight) {
                    LOGE("GIF image extends beyond logical screen");
                    return false;
                }

                // Write the new image descriptor.
                if (EGifPutImageDesc(gifOut,
                                     0, // Left
                                     0, // Top
                                     gifOut->SWidth,
                                     gifOut->SHeight,
                                     false, // Interlace
                                     gifIn->Image.ColorMap) == GIF_ERROR) {
                    LOGE("Could not write image descriptor (%d)", imageIndex);
                    return false;
                }

                // Read the image from the input GIF. The buffer is already initialized to the
                // size of the GIF, which is usually equal to the size of all the images inside it.
                // If not, the call to resize below ensures that the buffer is the right size.
                srcBuffer.resize(gifIn->Image.Width * gifIn->Image.Height);
                if (readImage(gifIn, srcBuffer.data()) == false) {
                    LOGE("Could not read image data (%d)", imageIndex);
                    return false;
                }
                LOGD("Read image data (%d)", imageIndex);
                // Render the image from the input GIF.
                if (renderImage(gifIn,
                                srcBuffer.data(),
                                imageIndex,
                                transparentColor,
                                renderBuffer.get(),
                                bgColor,
                                prevImageDimens,
                                prevImageDisposalMode) == false) {
                    LOGE("Could not render %d", imageIndex);
                    return false;
                }
                LOGD("Rendered image (%d)", imageIndex);

                // Generate the image in the output GIF.
                for (int y = 0; y < gifOut->SHeight; y++) {
                    for (int x = 0; x < gifOut->SWidth; x++) {
                        const GifByteType dstColorIndex = computeNewColorIndex(
                                gifIn, transparentColor, renderBuffer.get(), x, y);
                        *(dstRowBuffer.get() + x) = dstColorIndex;
                    }
                    if (EGifPutLine(gifOut, dstRowBuffer.get(), gifOut->SWidth) == GIF_ERROR) {
                        LOGE("Could not write raster data (%d)", imageIndex);
                        return false;
                    }
                }
                LOGD("Wrote raster data (%d)", imageIndex);

                // Save the disposal mode for rendering the next image.
                // We only support DISPOSE_DO_NOT and DISPOSE_BACKGROUND.
                prevImageDisposalMode = disposalMode;
                if (prevImageDisposalMode == DISPOSAL_UNSPECIFIED) {
                    prevImageDisposalMode = DISPOSE_DO_NOT;
                } else if (prevImageDisposalMode == DISPOSE_PREVIOUS) {
                    prevImageDisposalMode = DISPOSE_BACKGROUND;
                }
                if (prevImageDisposalMode == DISPOSE_BACKGROUND) {
                    prevImageDimens.Left = gifIn->Image.Left;
                    prevImageDimens.Top = gifIn->Image.Top;
                    prevImageDimens.Width = gifIn->Image.Width;
                    prevImageDimens.Height = gifIn->Image.Height;
                }

                if (gifOut->Image.ColorMap) {
                    GifFreeMapObject(gifOut->Image.ColorMap);
                    gifOut->Image.ColorMap = NULL;
                }

                imageIndex++;
            } break;
            case EXTENSION_RECORD_TYPE: {
                int extCode;
                GifByteType* ext;
                if (DGifGetExtension(gifIn, &extCode, &ext) == GIF_ERROR) {
                    LOGE("Could not read extension block");
                    return false;
                }
                LOGD("Read extension block, code: %d", extCode);
                if (extCode == GRAPHICS_EXT_FUNC_CODE) {
                    GraphicsControlBlock gcb;
                    if (DGifExtensionToGCB(ext[0], ext + 1, &gcb) == GIF_ERROR) {
                        LOGE("Could not interpret GCB extension");
                        return false;
                    }
                    transparentColor = gcb.TransparentColor;

                    // This logic for setting the background color based on the first GCB
                    // doesn't quite match the GIF spec, but empirically it seems to work and it
                    // matches what libframesequence (Rastermill) does.
                    if (imageIndex == 0 && gifIn->SColorMap) {
                        if (gcb.TransparentColor == NO_TRANSPARENT_COLOR) {
                            GifColorType bgColorIndex =
                                    gifIn->SColorMap->Colors[gifIn->SBackGroundColor];
                            bgColor = gifColorToColorARGB(bgColorIndex);
                            LOGD("Set background color based on first GCB");
                        }
                    }

                    // Record the original disposal mode and then update it.
                    disposalMode = gcb.DisposalMode;
                    gcb.DisposalMode = DISPOSE_BACKGROUND;
                    EGifGCBToExtension(&gcb, ext + 1);
                }
                if (EGifPutExtensionLeader(gifOut, extCode) == GIF_ERROR) {
                    LOGE("Could not write extension leader");
                    return false;
                }
                if (EGifPutExtensionBlock(gifOut, ext[0], ext + 1) == GIF_ERROR) {
                    LOGE("Could not write extension block");
                    return false;
                }
                LOGD("Wrote extension block");
                while (ext != NULL) {
                    if (DGifGetExtensionNext(gifIn, &ext) == GIF_ERROR) {
                        LOGE("Could not read extension continuation");
                        return false;
                    }
                    if (ext != NULL) {
                        LOGD("Read extension continuation");
                        if (EGifPutExtensionBlock(gifOut, ext[0], ext + 1) == GIF_ERROR) {
                            LOGE("Could not write extension continuation");
                            return false;
                        }
                        LOGD("Wrote extension continuation");
                    }
                }
                if (EGifPutExtensionTrailer(gifOut) == GIF_ERROR) {
                    LOGE("Could not write extension trailer");
                    return false;
                }
            } break;
        }

    } while (recordType != TERMINATE_RECORD_TYPE);
    LOGD("No more records");

    return true;
}
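As the comment about disposal modes explains, DISPOSE_DO_NOT stacks frames on top of each other while DISPOSE_BACKGROUND clears the previous frame's region first. A sketch of how a renderer might apply the two supported modes to a screen-sized canvas, using a hypothetical applyDisposal helper (the enum values match giflib's):

#include <cstdint>

typedef uint32_t ColorARGB;

struct FrameRect { int left, top, width, height; };

// giflib disposal values, repeated here so the sketch is self-contained.
enum { DISPOSE_DO_NOT = 1, DISPOSE_BACKGROUND = 2 };

// Hypothetical helper: apply the previous frame's disposal to a screen-sized
// ARGB canvas (stride == screenWidth) before the next frame is drawn on top.
static void applyDisposal(ColorARGB* canvas, int screenWidth,
                          int prevDisposalMode, const FrameRect& prev,
                          ColorARGB bgColor)
{
    if (prevDisposalMode == DISPOSE_BACKGROUND) {
        // DISPOSE_BACKGROUND: clear only the previous frame's rectangle.
        for (int y = prev.top; y < prev.top + prev.height; ++y) {
            ColorARGB* row = canvas + y * screenWidth + prev.left;
            for (int x = 0; x < prev.width; ++x) {
                row[x] = bgColor;
            }
        }
    }
    // DISPOSE_DO_NOT: leave the canvas untouched so frames accumulate.
}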
Example #6
void Ogre2dManager::renderQueueEnded(
   Ogre::uint8 queueGroupId, const Ogre::String &invocation, bool &repeatThisInvocation)
{
   if (afterQueue && queueGroupId == targetQueue)
      renderBuffer();
}
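This hook only fires if the manager has been registered as a render-queue listener. A sketch of the wiring, assuming Ogre2dManager derives from Ogre::RenderQueueListener (as the override above implies) and that its declaration is available via a hypothetical header:

#include <Ogre.h>
#include "Ogre2dManager.h"  // hypothetical header declaring the class above

// Ogre invokes renderQueueEnded() on registered listeners after each queue
// group is drawn; on the target queue the manager then flushes its 2D quads.
void attachOverlayManager(Ogre::SceneManager* sceneMgr, Ogre2dManager* manager)
{
    sceneMgr->addRenderQueueListener(manager);
}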
Example #7
bool
RiscosGui::run()
{
    GNASH_REPORT_FUNCTION;

    os_t t, now;
    wimp_block block;
    wimp_event_no event;
    osbool more;
    os_error *error;

    t = os_read_monotonic_time();

    while (!_quit) {
        error = xwimp_poll_idle(wimp_SAVE_FP, &block, t, NULL, &event);
        if (error) {
            log_debug("%s\n", error->errmess);
            return false;
        }

        switch (event) {
        case wimp_NULL_REASON_CODE:
            now = os_read_monotonic_time();
            if (now > t) {
                if (_timeout > now) {
                    _quit = true;
                } else {
                    // TODO: pay attention to interval
//            if ((os_t)_interval <= (now - t) * 10) {
                    advance_movie(this);
//            }
                    now = os_read_monotonic_time();
                    t = now + 10;
                }
            }
            break;
        case wimp_REDRAW_WINDOW_REQUEST:
            error = xwimp_redraw_window(&block.redraw, &more);
            if (error) {
                log_debug("%s\n", error->errmess);
                return false;
            }
            while (more) {
//          rect bounds(block.redraw.clip.x0 / 2, block.redraw.clip.y0 / 2,
//                      block.redraw.clip.x1 / 2, block.redraw.clip.y1 / 2);
//          log_debug("Clip rect: (%d, %d)(%d, %d)\n",
//                  block.redraw.clip.x0 / 2, block.redraw.clip.y0 / 2,
//                  block.redraw.clip.x1 / 2, block.redraw.clip.y1 / 2);
                // TODO: Make this use the clipping rectangle (convert to TWIPS)
                rect bounds(-1e10f, -1e10f, 1e10f, 1e10f);
#ifdef RENDERER_AGG
                setInvalidatedRegion(bounds);
#endif
                renderBuffer();
                error = xwimp_get_rectangle(&block.redraw, &more);
                if (error) {
                    log_debug("%s\n", error->errmess);
                    return false;
                }
            }
            break;
        case wimp_OPEN_WINDOW_REQUEST:
            error = xwimp_open_window(&block.open);
            if (error)
                log_debug("%s\n", error->errmess);
            break;
        case wimp_CLOSE_WINDOW_REQUEST:
            _quit = true;
            break;
        case wimp_POINTER_LEAVING_WINDOW:
            break;
        case wimp_POINTER_ENTERING_WINDOW:
            break;
        case wimp_MOUSE_CLICK:
            break;
        case wimp_USER_DRAG_BOX:
            break;
        case wimp_MENU_SELECTION:
            break;
        case wimp_SCROLL_REQUEST:
            break;
        case wimp_LOSE_CARET:
            break;
        case wimp_GAIN_CARET:
            break;
        case wimp_POLLWORD_NON_ZERO:
            break;
        case wimp_USER_MESSAGE:
        case wimp_USER_MESSAGE_RECORDED:
        case wimp_USER_MESSAGE_ACKNOWLEDGE:
            switch (block.message.action) {
            case message_QUIT:
                _quit = true;
                break;
            default:
//          user_message(event, &(block.message));
                break;
            }
            break;
        }
    }

    return true;
}
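The loop above drives animation from null (idle) events: xwimp_poll_idle sleeps until time t, the handler advances the movie, and t is pushed into the future again. The same scheduling core, stripped of the WIMP specifics, with hypothetical pollIdle, advanceFrame, and handleEvent stand-ins:

#include <chrono>

using Clock = std::chrono::steady_clock;

// Hypothetical stand-ins for the WIMP calls, so the sketch is self-contained.
static int pollIdle(Clock::time_point /*wakeTime*/) { return 0; } // 0 = idle wake-up
static void advanceFrame() { /* render one movie frame */ }
static void handleEvent(int /*event*/) { /* redraw, input, quit, ... */ }

static void runLoop(bool& quit, Clock::duration frameInterval)
{
    Clock::time_point next = Clock::now();
    while (!quit) {
        int event = pollIdle(next);      // like xwimp_poll_idle(..., t, ...)
        if (event == 0) {                // null event: the idle deadline passed
            if (Clock::now() >= next) {
                advanceFrame();
                next = Clock::now() + frameInterval;
            }
        } else {
            handleEvent(event);          // redraw requests, clicks, messages, ...
        }
    }
}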
Example #8
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            LOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            LOGV("kWhatSetVideoNativeWindow");

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            LOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            LOGV("kWhatStart");

            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            LOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    LOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    LOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                LOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    LOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32("channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    LOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    mAudioSink->close();
                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    LOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         (cropRight - cropLeft + 1),
                         (cropBottom - cropTop + 1),
                         cropLeft, cropTop);

                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE,
                            cropRight - cropLeft + 1,
                            cropBottom - cropTop + 1);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                LOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                LOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else {
                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);

                renderBuffer(audio, codecRequest);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    LOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    LOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                LOGV("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            LOGV("kWhatReset");

            if (mRenderer != NULL) {
                // There's an edge case where the renderer owns all output
                // buffers and is paused, therefore the decoder will not read
                // more input data and will never encounter the matching
                // discontinuity. To avoid this, we resume the renderer.

                if (mFlushingAudio == AWAITING_DISCONTINUITY
                        || mFlushingVideo == AWAITING_DISCONTINUITY) {
                    mRenderer->resume();
                }
            }

            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
                // We're currently flushing, postpone the reset until that's
                // completed.

                LOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d",
                        mFlushingAudio, mFlushingVideo);

                mResetPostponed = true;
                break;
            }

            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                finishReset();
                break;
            }

            mTimeDiscontinuityPending = true;

            if (mAudioDecoder != NULL) {
                flushDecoder(true /* audio */, true /* needShutdown */);
            }

            if (mVideoDecoder != NULL) {
                flushDecoder(false /* audio */, true /* needShutdown */);
            }

            mResetInProgress = true;
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            LOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
                 seekTimeUs, seekTimeUs / 1E6);

            mSource->seekTo(seekTimeUs);

            if (mDriver != NULL) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifySeekComplete();
                }
            }

            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mRenderer->resume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}
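Throughout both NuPlayer listings, callbacks reach the driver via mDriver.promote(), promoting a weak reference to a strong one only if the driver still exists, which avoids a reference cycle between player and driver. A minimal standard-library analogue of that pattern using std::weak_ptr:

#include <cstdio>
#include <memory>

struct Driver {
    void notifyPosition(long long positionUs) {
        std::printf("position %lld us\n", positionUs);
    }
};

struct Player {
    std::weak_ptr<Driver> mDriver;  // weak: the driver owns the player, not vice versa

    void onPosition(long long positionUs) {
        // promote() equivalent: lock() yields a strong reference only if the
        // driver is still alive, mirroring the mDriver.promote() checks above.
        if (std::shared_ptr<Driver> driver = mDriver.lock()) {
            driver->notifyPosition(positionUs);
        }
    }
};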