Example no. 1
0
bool cv::gpu::VideoReader_GPU::Impl::grab(GpuMat& frame)
{
    if (videoSource_->hasError() || videoParser_->hasError())
        CV_Error(CV_StsUnsupportedFormat, "Unsupported video source");

    if (!videoSource_->isStarted() || frameQueue_->isEndOfDecode())
        return false;

    if (frames_.empty())
    {
        CUVIDPARSERDISPINFO displayInfo;

        for (;;)
        {
            if (frameQueue_->dequeue(displayInfo))
                break;

            if (videoSource_->hasError() || videoParser_->hasError())
                CV_Error(CV_StsUnsupportedFormat, "Unsupported video source");

            if (frameQueue_->isEndOfDecode())
                return false;

            // Wait a bit
            detail::Thread::sleep(1);
        }

        bool isProgressive = displayInfo.progressive_frame != 0;
        const int num_fields = isProgressive ? 1 : 2 + displayInfo.repeat_first_field;

        for (int active_field = 0; active_field < num_fields; ++active_field)
        {
            CUVIDPROCPARAMS videoProcParams;
            std::memset(&videoProcParams, 0, sizeof(CUVIDPROCPARAMS));

            videoProcParams.progressive_frame = displayInfo.progressive_frame;
            videoProcParams.second_field      = active_field;
            videoProcParams.top_field_first   = displayInfo.top_field_first;
            videoProcParams.unpaired_field    = (num_fields == 1);

            frames_.push_back(std::make_pair(displayInfo, videoProcParams));
        }
    }

    if (frames_.empty())
        return false;

    std::pair<CUVIDPARSERDISPINFO, CUVIDPROCPARAMS> frameInfo = frames_.front();
    frames_.pop_front();

    {
        VideoCtxAutoLock autoLock(lock_);

        // map decoded video frame to CUDA surface
        cv::gpu::GpuMat decodedFrame = videoDecoder_->mapFrame(frameInfo.first.picture_index, frameInfo.second);

        // perform post processing on the CUDA surface (color space conversion and post processing)
        cudaPostProcessFrame(decodedFrame, frame, videoDecoder_->targetWidth(), videoDecoder_->targetHeight());

        // unmap video frame
        // unmapFrame() synchronizes with the VideoDecode API (ensures the frame has finished decoding)
        videoDecoder_->unmapFrame(decodedFrame);
    }

    // release the frame, so it can be re-used in decoder
    if (frames_.empty())
        frameQueue_->releaseFrame(frameInfo.first);

    return true;
}
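For context, a minimal usage sketch of the loop that drives Impl::grab() above, assuming the OpenCV 2.4-era gpu module API (VideoReader_GPU with read() and close(); the file name is illustrative):

#include <opencv2/gpu/gpu.hpp>

int main()
{
    cv::gpu::VideoReader_GPU reader("video.h264"); // illustrative input file
    cv::gpu::GpuMat frame;

    // read() returns false at end-of-decode or on a stopped source,
    // mirroring the early-return paths in grab() above.
    while (reader.read(frame))
    {
        // consume the decoded frame on the GPU here
    }

    reader.close();
    return 0;
}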
Example no. 2
0
void
nsPicoService::Init()
{
  MOZ_ASSERT(!NS_IsMainThread());
  MOZ_ASSERT(!mInitialized);

  if (!sPicoApi.Init()) {
    NS_WARNING("Failed to initialize pico library");
    return;
  }

  // Use environment variable, or default android/b2g path
  nsAutoCString langPath(PR_GetEnv("PICO_LANG_PATH"));

  if (langPath.IsEmpty()) {
    langPath.AssignLiteral(GONK_PICO_LANG_PATH);
  }

  nsCOMPtr<nsIFile> voicesDir;
  NS_NewNativeLocalFile(langPath, true, getter_AddRefs(voicesDir));

  nsCOMPtr<nsISimpleEnumerator> dirIterator;
  nsresult rv = voicesDir->GetDirectoryEntries(getter_AddRefs(dirIterator));

  if (NS_FAILED(rv)) {
    NS_WARNING(nsPrintfCString("Failed to get contents of directory: %s", langPath.get()).get());
    return;
  }

  bool hasMoreElements = false;
  rv = dirIterator->HasMoreElements(&hasMoreElements);
  MOZ_ASSERT(NS_SUCCEEDED(rv));

  MonitorAutoLock autoLock(mVoicesMonitor);

  while (hasMoreElements && NS_SUCCEEDED(rv)) {
    nsCOMPtr<nsISupports> supports;
    rv = dirIterator->GetNext(getter_AddRefs(supports));
    MOZ_ASSERT(NS_SUCCEEDED(rv));

    nsCOMPtr<nsIFile> voiceFile = do_QueryInterface(supports);
    MOZ_ASSERT(voiceFile);

    nsAutoCString leafName;
    voiceFile->GetNativeLeafName(leafName);

    nsAutoString lang;

    if (GetVoiceFileLanguage(leafName, lang)) {
      nsAutoString uri;
      uri.AssignLiteral("urn:moz-tts:pico:");
      uri.Append(lang);

      bool found = false;
      PicoVoice* voice = mVoices.GetWeak(uri, &found);

      if (!found) {
        voice = new PicoVoice(lang);
        mVoices.Put(uri, voice);
      }

      // Each voice consists of two lingware files: A language resource file,
      // suffixed by _ta.bin, and a speaker resource file, suffixed by _sg.bin.
      // We currently assume that there is a pair of files for each language.
      if (StringEndsWith(leafName, NS_LITERAL_CSTRING("_ta.bin"))) {
        rv = voiceFile->GetPersistentDescriptor(voice->mTaFile);
        MOZ_ASSERT(NS_SUCCEEDED(rv));
      } else if (StringEndsWith(leafName, NS_LITERAL_CSTRING("_sg.bin"))) {
        rv = voiceFile->GetPersistentDescriptor(voice->mSgFile);
        MOZ_ASSERT(NS_SUCCEEDED(rv));
      }
    }

    rv = dirIterator->HasMoreElements(&hasMoreElements);
  }

  NS_DispatchToMainThread(NS_NewRunnableMethod(this, &nsPicoService::RegisterVoices));
}
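The file-pairing logic in the loop above can be isolated; a hypothetical, self-contained sketch (not the Mozilla API, and it assumes leaf names of the form <lang>_ta.bin / <lang>_sg.bin):

#include <string>

struct LingwareFile {
    std::string lang;    // e.g. "en-US"
    bool isTextAnalysis; // true for _ta.bin, false for _sg.bin
};

// Returns false for files that are not lingware resources.
static bool ClassifyLingware(const std::string& leaf, LingwareFile* out)
{
    const std::string kTa = "_ta.bin";
    const std::string kSg = "_sg.bin";
    auto endsWith = [&](const std::string& suffix) {
        return leaf.size() >= suffix.size() &&
               leaf.compare(leaf.size() - suffix.size(), suffix.size(), suffix) == 0;
    };
    if (endsWith(kTa)) {
        *out = LingwareFile{ leaf.substr(0, leaf.size() - kTa.size()), true };
        return true;
    }
    if (endsWith(kSg)) {
        *out = LingwareFile{ leaf.substr(0, leaf.size() - kSg.size()), false };
        return true;
    }
    return false;
}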
status_t SampleTable::findSyncSampleNear(
        uint32_t start_sample_index, uint32_t *sample_index, uint32_t flags) {
    Mutex::Autolock autoLock(mLock);

    *sample_index = 0;

    if (mSyncSampleOffset < 0) {
        // All samples are sync-samples.
        *sample_index = start_sample_index;
        return OK;
    }

    if (mNumSyncSamples == 0) {
        *sample_index = 0;
        return OK;
    }

    uint32_t left = 0;
    while (left < mNumSyncSamples) {
        uint32_t x = mSyncSamples[left];

        if (x >= start_sample_index) {
            break;
        }

        ++left;
    }

    if (left == mNumSyncSamples) {
        if (flags == kFlagAfter) {
            LOGE("tried to find a sync frame after the last one: %d", left);
            return ERROR_OUT_OF_RANGE;
        }
    }

    if (left > 0) {
        --left;
    }

    uint32_t x = mSyncSamples[left];

    if (left + 1 < mNumSyncSamples) {
        uint32_t y = mSyncSamples[left + 1];

        // our sample lies between sync samples x and y.

        status_t err = mSampleIterator->seekTo(start_sample_index);
        if (err != OK) {
            return err;
        }

        uint32_t sample_time = mSampleIterator->getSampleTime();

        err = mSampleIterator->seekTo(x);
        if (err != OK) {
            return err;
        }
        uint32_t x_time = mSampleIterator->getSampleTime();

        err = mSampleIterator->seekTo(y);
        if (err != OK) {
            return err;
        }

        uint32_t y_time = mSampleIterator->getSampleTime();

        if (abs_difference(x_time, sample_time)
                > abs_difference(y_time, sample_time)) {
            // Pick the sync sample closest (timewise) to the start-sample.
            x = y;
            ++left;
        }
    }

    switch (flags) {
        case kFlagBefore:
        {
            if (x > start_sample_index) {
                CHECK(left > 0);

                x = mSyncSamples[left - 1];

                CHECK(x <= start_sample_index);
            }
            break;
        }

        case kFlagAfter:
        {
            if (x < start_sample_index) {
                if (left + 1 >= mNumSyncSamples) {
                    return ERROR_OUT_OF_RANGE;
                }

                x = mSyncSamples[left + 1];

                CHECK(x >= start_sample_index);
            }

            break;
        }

        default:
            break;
    }

    *sample_index = x;

    return OK;
}
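findSyncSampleNear() relies on an abs_difference() helper; a minimal sketch of what it must do (AOSP defines it as a template, and this monomorphic version highlights why plain subtraction is wrong for unsigned operands):

#include <stdint.h>

// abs_difference must avoid unsigned wrap-around: with uint32_t,
// x_time - sample_time would be huge whenever x_time < sample_time.
static inline uint32_t abs_difference(uint32_t a, uint32_t b) {
    return a > b ? a - b : b - a;
}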
void TimedEventQueue::threadEntry() {
    prctl(PR_SET_NAME, (unsigned long)"TimedEventQueue", 0, 0, 0);

    for (;;) {
        int64_t now_us = 0;
        sp<Event> event;

        {
            Mutex::Autolock autoLock(mLock);

            if (mStopped) {
                break;
            }

            while (mQueue.empty()) {
                mQueueNotEmptyCondition.wait(mLock);
            }

            event_id eventID = 0;
            for (;;) {
                if (mQueue.empty()) {
                    // The only event in the queue could have been cancelled
                    // while we were waiting for its scheduled time.
                    break;
                }

                List<QueueItem>::iterator it = mQueue.begin();
                eventID = (*it).event->eventID();

                now_us = getRealTimeUs();
                int64_t when_us = (*it).realtime_us;

                int64_t delay_us;
                if (when_us < 0 || when_us == INT64_MAX) {
                    delay_us = 0;
                } else {
                    delay_us = when_us - now_us;
                }

                if (delay_us <= 0) {
                    break;
                }

                static int64_t kMaxTimeoutUs = 10000000ll;  // 10 secs
                bool timeoutCapped = false;
                if (delay_us > kMaxTimeoutUs) {
                    LOGW("delay_us exceeds max timeout: %lld us", delay_us);

                    // We'll never block for more than 10 secs, instead
                    // we will split up the full timeout into chunks of
                    // 10 secs at a time. This will also avoid overflow
                    // when converting from us to ns.
                    delay_us = kMaxTimeoutUs;
                    timeoutCapped = true;
                }

                status_t err = mQueueHeadChangedCondition.waitRelative(
                        mLock, delay_us * 1000ll);

                if (!timeoutCapped && err == -ETIMEDOUT) {
                    // We finally hit the time this event is supposed to
                    // trigger.
                    now_us = getRealTimeUs();
                    break;
                }
            }

            // The event w/ this id may have been cancelled while we're
            // waiting for its trigger-time, in that case
            // removeEventFromQueue_l will return NULL.
            // Otherwise, the QueueItem will be removed
            // from the queue and the referenced event returned.
            event = removeEventFromQueue_l(eventID);
        }

        if (event != NULL) {
            // Fire event with the lock NOT held.
            event->fire(this, now_us);
        }
    }
}
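The capped-wait idea above generalizes beyond this class; a self-contained C++11 sketch of the same pattern, under the stated assumptions that waits are chunked so the us-to-ns conversion cannot overflow and that a distant deadline is periodically re-examined:

#include <chrono>
#include <condition_variable>
#include <mutex>

// Wait until 'deadline' without ever blocking longer than kMaxChunk at a
// time; a spurious or signalled wake simply re-evaluates the deadline,
// which is how the loop above tolerates queue-head changes.
void waitUntilChunked(std::condition_variable& cv,
                      std::unique_lock<std::mutex>& lk,
                      std::chrono::steady_clock::time_point deadline)
{
    const auto kMaxChunk = std::chrono::seconds(10);
    for (;;) {
        auto now = std::chrono::steady_clock::now();
        if (now >= deadline)
            break;
        auto chunk = deadline - now;
        if (chunk > kMaxChunk)
            chunk = kMaxChunk;
        cv.wait_for(lk, chunk); // may return early; the loop re-checks
    }
}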
size_t AudioPlayer::fillBuffer(void *data, size_t size) {
    if (mNumFramesPlayed == 0) {
        ALOGV("AudioCallback");
    }

    if (mReachedEOS) {
        return 0;
    }

    bool postSeekComplete = false;
    bool postEOS = false;
    int64_t postEOSDelayUs = 0;

    size_t size_done = 0;
    size_t size_remaining = size;
    while (size_remaining > 0) {
        MediaSource::ReadOptions options;

        {
            Mutex::Autolock autoLock(mLock);

            if (mSeeking) {
                if (mIsFirstBuffer) {
                    if (mFirstBuffer != NULL) {
                        mFirstBuffer->release();
                        mFirstBuffer = NULL;
                    }
                    mIsFirstBuffer = false;
                }

                options.setSeekTo(mSeekTimeUs);

                if (mInputBuffer != NULL) {
                    mInputBuffer->release();
                    mInputBuffer = NULL;
                }

                mSeeking = false;
                if (mObserver) {
                    postSeekComplete = true;
                }
            }
        }

        if (mInputBuffer == NULL) {
            status_t err;

            if (mIsFirstBuffer) {
                mInputBuffer = mFirstBuffer;
                mFirstBuffer = NULL;
                err = mFirstBufferResult;

                mIsFirstBuffer = false;
            } else {
                err = mSource->read(&mInputBuffer, &options);
#ifdef QCOM_HARDWARE
                if (err == OK && mInputBuffer == NULL && mSourcePaused) {
                    ALOGV("mSourcePaused, return 0 from fillBuffer");
                    return 0;
                }
#endif
            }

            CHECK((err == OK && mInputBuffer != NULL)
                   || (err != OK && mInputBuffer == NULL));

            Mutex::Autolock autoLock(mLock);

            if (err != OK) {
                if (mObserver && !mReachedEOS) {
                    // We don't want to post EOS right away but only
                    // after all frames have actually been played out.

                    // This is the number of frames submitted to the
                    // AudioTrack that have not been heard yet.
                    uint32_t numFramesPendingPlayout =
                        getNumFramesPendingPlayout();

                    // This is the number of frames we're going to
                    // submit to the AudioTrack by returning from this
                    // callback.
                    uint32_t numAdditionalFrames = size_done / mFrameSize;

                    numFramesPendingPlayout += numAdditionalFrames;

                    int64_t timeToCompletionUs =
                        (1000000ll * numFramesPendingPlayout) / mSampleRate;

                    ALOGV("total number of frames played: %lld (%lld us)",
                            (mNumFramesPlayed + numAdditionalFrames),
                            1000000ll * (mNumFramesPlayed + numAdditionalFrames)
                                / mSampleRate);

                    ALOGV("%d frames left to play, %lld us (%.2f secs)",
                         numFramesPendingPlayout,
                         timeToCompletionUs, timeToCompletionUs / 1E6);

                    postEOS = true;
                    if (mAudioSink->needsTrailingPadding()) {
                        postEOSDelayUs = timeToCompletionUs + mLatencyUs;
                    } else {
                        postEOSDelayUs = 0;
                    }
                }

                mReachedEOS = true;
                mFinalStatus = err;
                break;
            }

            if (mAudioSink != NULL) {
                mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
            } else {
                mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
            }

            if (mInputBuffer->range_length() != 0) {
                // check for non-zero buffer
                CHECK(mInputBuffer->meta_data()->findInt64(
                        kKeyTime, &mPositionTimeMediaUs));

            }
            mPositionTimeRealUs =
                    ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
                    / mSampleRate;

            ALOGV("buffer->size() = %d, "
                    "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
                    mInputBuffer->range_length(),
                    mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
        }

        if (mInputBuffer->range_length() == 0) {
            mInputBuffer->release();
            mInputBuffer = NULL;

            continue;
        }

        size_t copy = size_remaining;
        if (copy > mInputBuffer->range_length()) {
            copy = mInputBuffer->range_length();
        }

        memcpy((char *)data + size_done,
               (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
               copy);

        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
                                mInputBuffer->range_length() - copy);

        size_done += copy;
        size_remaining -= copy;
    }

    {
        Mutex::Autolock autoLock(mLock);
        mNumFramesPlayed += size_done / mFrameSize;

        if (mReachedEOS) {
            mPinnedTimeUs = mNumFramesPlayedSysTimeUs;
        } else {
            mNumFramesPlayedSysTimeUs = ALooper::GetNowUs();
            mPinnedTimeUs = -1ll;
        }
    }

    if (postEOS) {
        mObserver->postAudioEOS(postEOSDelayUs);
    }

    if (postSeekComplete) {
        mObserver->postAudioSeekComplete();
    }

    return size_done;
}
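The EOS-delay arithmetic above is worth isolating; a small sketch of the frames-to-microseconds conversion (the name is illustrative):

#include <stdint.h>

// Frames still queued in the sink, expressed as playback time at the
// output sample rate: 44100 pending frames at 44100 Hz -> 1,000,000 us.
static int64_t framesToUs(uint32_t pendingFrames, uint32_t sampleRate) {
    return (1000000ll * pendingFrames) / sampleRate;
}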
Example no. 6
0
void OMXCodecProxy::setEventListener(const wp<OMXCodecProxy::EventListener>& listener)
{
  Mutex::Autolock autoLock(mLock);
  mEventListener = listener;
}
Example no. 7
0
size_t AudioOffloadPlayer::FillBuffer(void* aData, size_t aSize)
{
  CHECK(mAudioSink.get());

  if (mReachedEOS) {
    return 0;
  }

  size_t sizeDone = 0;
  size_t sizeRemaining = aSize;
  int64_t seekTimeUs = -1;
  while (sizeRemaining > 0) {
    MediaSource::ReadOptions options;
    bool refreshSeekTime = false;
    {
      android::Mutex::Autolock autoLock(mLock);

      if (mSeekTarget.IsValid()) {
        seekTimeUs = mSeekTarget.GetTime().ToMicroseconds();
        options.setSeekTo(seekTimeUs);
        refreshSeekTime = true;

        if (mInputBuffer) {
          mInputBuffer->release();
          mInputBuffer = nullptr;
        }
      }
    }

    if (!mInputBuffer) {
      status_t err;
      err = mSource->read(&mInputBuffer, &options);

      CHECK((!err && mInputBuffer) || (err && !mInputBuffer));

      android::Mutex::Autolock autoLock(mLock);

      if (err != OK) {
        if (mSeekTarget.IsValid()) {
          mSeekTarget.Reset();
        }
        AUDIO_OFFLOAD_LOG(LogLevel::Error, ("Error while reading media source %d "
            "Ok to receive EOS error at end", err));
        if (!mReachedEOS) {
          // After seek there is a possible race condition if
          // OffloadThread is observing state_stopping_1 before
          // framesReady() > 0. Ensure sink stop is called
          // after last buffer is released. This ensures the
          // partial buffer is written to the driver before
          // the stop is observed. The drawback is that
          // there will be an unnecessary call to the parser
          // after parser signalled EOS.
          if (sizeDone > 0) {
            AUDIO_OFFLOAD_LOG(LogLevel::Debug, ("send Partial buffer down"));
            AUDIO_OFFLOAD_LOG(LogLevel::Debug, ("skip calling stop till next"
                " fillBuffer"));
            break;
          }
          // no more buffers to push - stop() and wait for STREAM_END
          // don't set mReachedEOS until stream end received
          mAudioSink->Stop();
        }
        break;
      }

      if (mInputBuffer->range_length() != 0) {
        CHECK(mInputBuffer->meta_data()->findInt64(
            kKeyTime, &mPositionTimeMediaUs));
      }

      if (mSeekTarget.IsValid() &&
          seekTimeUs == mSeekTarget.GetTime().ToMicroseconds()) {
        if (!mSeekPromise.IsEmpty()) {
          AUDIO_OFFLOAD_LOG(LogLevel::Debug, ("FillBuffer posting SEEK_COMPLETE"));
          // Capture mEventVisibility before the seek target is reset below.
          MediaDecoder::SeekResolveValue val(mReachedEOS, mSeekTarget.mEventVisibility);
          mSeekPromise.Resolve(val, __func__);
        }
        mSeekTarget.Reset();
      } else if (mSeekTarget.IsValid()) {
        AUDIO_OFFLOAD_LOG(LogLevel::Debug, ("seek is updated during unlocking mLock"));
      }

      if (refreshSeekTime) {
        NotifyPositionChanged();

        // need to adjust the mStartPosUs for offload decoding since parser
        // might not be able to get the exact seek time requested.
        mStartPosUs = mPositionTimeMediaUs;
        AUDIO_OFFLOAD_LOG(LogLevel::Debug, ("Adjust seek time to: %.2f",
            mStartPosUs / 1E6));
      }
    }

    if (mInputBuffer->range_length() == 0) {
      mInputBuffer->release();
      mInputBuffer = nullptr;
      continue;
    }

    size_t copy = sizeRemaining;
    if (copy > mInputBuffer->range_length()) {
      copy = mInputBuffer->range_length();
    }

    memcpy((char *)aData + sizeDone,
        (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
        copy);

    mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
        mInputBuffer->range_length() - copy);

    sizeDone += copy;
    sizeRemaining -= copy;
  }
  return sizeDone;
}
DRMSource::~DRMSource() {
    Mutex::Autolock autoLock(mDRMLock);
    mDrmManagerClient->finalizeDecryptUnit(mDecryptHandle, mTrackId);
}
status_t DRMSource::read(MediaBuffer **buffer, const ReadOptions *options) {
    Mutex::Autolock autoLock(mDRMLock);
    status_t err;
    if ((err = mOriginalMediaSource->read(buffer, options)) != OK) {
        return err;
    }

    size_t len = (*buffer)->range_length();

    char *src = (char *)(*buffer)->data() + (*buffer)->range_offset();

    DrmBuffer encryptedDrmBuffer(src, len);
    DrmBuffer decryptedDrmBuffer;
    decryptedDrmBuffer.length = len;
    decryptedDrmBuffer.data = new char[len];
    DrmBuffer *pDecryptedDrmBuffer = &decryptedDrmBuffer;

    if ((err = mDrmManagerClient->decrypt(mDecryptHandle, mTrackId,
            &encryptedDrmBuffer, &pDecryptedDrmBuffer)) != NO_ERROR) {

        if (decryptedDrmBuffer.data) {
            delete [] decryptedDrmBuffer.data;
            decryptedDrmBuffer.data = NULL;
        }

        return err;
    }
    CHECK(pDecryptedDrmBuffer == &decryptedDrmBuffer);

    const char *mime;
    CHECK(getFormat()->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC) && !mWantsNALFragments) {
        uint8_t *dstData = (uint8_t*)src;
        size_t srcOffset = 0;
        size_t dstOffset = 0;

        len = decryptedDrmBuffer.length;
        while (srcOffset < len) {
            CHECK(srcOffset + mNALLengthSize <= len);
            size_t nalLength = 0;
            const uint8_t* data = (const uint8_t*)(&decryptedDrmBuffer.data[srcOffset]);

            switch (mNALLengthSize) {
                case 1:
                    nalLength = *data;
                    break;
                case 2:
                    nalLength = U16_AT(data);
                    break;
                case 3:
                    nalLength = ((size_t)data[0] << 16) | U16_AT(&data[1]);
                    break;
                case 4:
                    nalLength = U32_AT(data);
                    break;
                default:
                    CHECK(!"Should not be here.");
                    break;
            }

            srcOffset += mNALLengthSize;

            if (srcOffset + nalLength > len) {
                if (decryptedDrmBuffer.data) {
                    delete [] decryptedDrmBuffer.data;
                    decryptedDrmBuffer.data = NULL;
                }

                return ERROR_MALFORMED;
            }

            if (nalLength == 0) {
                continue;
            }

            CHECK(dstOffset + 4 <= (*buffer)->size());

            dstData[dstOffset++] = 0;
            dstData[dstOffset++] = 0;
            dstData[dstOffset++] = 0;
            dstData[dstOffset++] = 1;
            memcpy(&dstData[dstOffset], &decryptedDrmBuffer.data[srcOffset], nalLength);
            srcOffset += nalLength;
            dstOffset += nalLength;
        }

        CHECK_EQ(srcOffset, len);
        (*buffer)->set_range((*buffer)->range_offset(), dstOffset);

    } else {
        memcpy(src, decryptedDrmBuffer.data, decryptedDrmBuffer.length);
        (*buffer)->set_range((*buffer)->range_offset(), decryptedDrmBuffer.length);
    }

    if (decryptedDrmBuffer.data) {
        delete [] decryptedDrmBuffer.data;
        decryptedDrmBuffer.data = NULL;
    }

    return OK;
}
Example no. 10
0
void
ImageHost::Composite(EffectChain& aEffectChain,
                     float aOpacity,
                     const gfx::Matrix4x4& aTransform,
                     const gfx::Filter& aFilter,
                     const gfx::Rect& aClipRect,
                     const nsIntRegion* aVisibleRegion,
                     TiledLayerProperties* aLayerProperties)
{
  if (!GetCompositor()) {
    // This should only happen when a tab is dragged to another window and
    // async-video is still sending frames but we haven't attached the
    // new compositor yet.
    return;
  }
  if (!mFrontBuffer) {
    return;
  }

  // Make sure the front buffer has a compositor
  mFrontBuffer->SetCompositor(GetCompositor());

  AutoLockTextureHost autoLock(mFrontBuffer);
  if (autoLock.Failed()) {
    NS_WARNING("failed to lock front buffer");
    return;
  }
  RefPtr<NewTextureSource> source = mFrontBuffer->GetTextureSources();
  if (!source) {
    return;
  }
  RefPtr<TexturedEffect> effect = CreateTexturedEffect(mFrontBuffer->GetFormat(),
                                                       source,
                                                       aFilter);
  if (!effect) {
    return;
  }

  aEffectChain.mPrimaryEffect = effect;
  IntSize textureSize = source->GetSize();
  gfx::Rect gfxPictureRect
    = mHasPictureRect ? gfx::Rect(0, 0, mPictureRect.width, mPictureRect.height)
                      : gfx::Rect(0, 0, textureSize.width, textureSize.height);

  gfx::Rect pictureRect(0, 0,
                        mPictureRect.width,
                        mPictureRect.height);
  //XXX: We might have multiple texture sources here (e.g. 3 YCbCr textures), and we're
  // only iterating over the tiles of the first one. Are we assuming that the tiling
  // will be identical? Can we ensure that somehow?
  TileIterator* it = source->AsTileIterator();
  if (it) {
    it->BeginTileIteration();
    do {
      nsIntRect tileRect = it->GetTileRect();
      gfx::Rect rect(tileRect.x, tileRect.y, tileRect.width, tileRect.height);
      if (mHasPictureRect) {
        rect = rect.Intersect(pictureRect);
        effect->mTextureCoords = Rect(Float(rect.x - tileRect.x)/ tileRect.width,
                                      Float(rect.y - tileRect.y) / tileRect.height,
                                      Float(rect.width) / tileRect.width,
                                      Float(rect.height) / tileRect.height);
      } else {
        effect->mTextureCoords = Rect(0, 0, 1, 1);
      }
      GetCompositor()->DrawQuad(rect, aClipRect, aEffectChain,
                                aOpacity, aTransform);
      GetCompositor()->DrawDiagnostics(DIAGNOSTIC_IMAGE|DIAGNOSTIC_BIGIMAGE,
                                       rect, aClipRect, aTransform);
    } while (it->NextTile());
    it->EndTileIteration();
    // layer border
    GetCompositor()->DrawDiagnostics(DIAGNOSTIC_IMAGE,
                                     gfxPictureRect, aClipRect,
                                     aTransform);
  } else {
    IntSize textureSize = source->GetSize();
    gfx::Rect rect;
    if (mHasPictureRect) {
      effect->mTextureCoords = Rect(Float(mPictureRect.x) / textureSize.width,
                                    Float(mPictureRect.y) / textureSize.height,
                                    Float(mPictureRect.width) / textureSize.width,
                                    Float(mPictureRect.height) / textureSize.height);
      rect = pictureRect;
    } else {
      effect->mTextureCoords = Rect(0, 0, 1, 1);
      rect = gfx::Rect(0, 0, textureSize.width, textureSize.height);
    }

    if (mFrontBuffer->GetFlags() & TEXTURE_NEEDS_Y_FLIP) {
      effect->mTextureCoords.y = effect->mTextureCoords.YMost();
      effect->mTextureCoords.height = -effect->mTextureCoords.height;
    }

    GetCompositor()->DrawQuad(rect, aClipRect, aEffectChain,
                              aOpacity, aTransform);
    GetCompositor()->DrawDiagnostics(DIAGNOSTIC_IMAGE,
                                     rect, aClipRect,
                                     aTransform);
  }
}
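The picture-rect handling above reduces to normalizing a pixel-space rect against the texture size; a minimal sketch (RectF is a stand-in for gfx::Rect):

struct RectF { float x, y, w, h; };

// Map a pixel-space picture rect into the [0,1] texture-coordinate space
// of a texture with the given dimensions, as in the non-tiled path above.
static RectF normalizedTexCoords(int picX, int picY, int picW, int picH,
                                 int texW, int texH)
{
    return RectF{ float(picX) / texW, float(picY) / texH,
                  float(picW) / texW, float(picH) / texH };
}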
void
ImageLayerD3D10::RenderLayer()
{
  ImageContainer *container = GetContainer();
  if (!container) {
    return;
  }

  AutoLockImage autoLock(container);

  Image *image = autoLock.GetImage();
  if (!image) {
    return;
  }

  gfxIntSize size = mScaleMode == SCALE_NONE ? image->GetSize() : mScaleToSize;

  SetEffectTransformAndOpacity();

  ID3D10EffectTechnique *technique;

  if (image->GetFormat() == Image::CAIRO_SURFACE || image->GetFormat() == Image::REMOTE_IMAGE_BITMAP)
  {
    bool hasAlpha = false;

    if (image->GetFormat() == Image::REMOTE_IMAGE_BITMAP) {
      RemoteBitmapImage *remoteImage =
        static_cast<RemoteBitmapImage*>(image);
      
      if (!image->GetBackendData(LayerManager::LAYERS_D3D10)) {
        nsAutoPtr<TextureD3D10BackendData> dat = new TextureD3D10BackendData();
        dat->mTexture = DataToTexture(device(), remoteImage->mData, remoteImage->mStride, remoteImage->mSize);

        if (dat->mTexture) {
          device()->CreateShaderResourceView(dat->mTexture, NULL, getter_AddRefs(dat->mSRView));
          image->SetBackendData(LayerManager::LAYERS_D3D10, dat.forget());
        }
      }

      hasAlpha = remoteImage->mFormat == RemoteImageData::BGRA32;
    } else {
      CairoImage *cairoImage =
        static_cast<CairoImage*>(image);

      if (!cairoImage->mSurface) {
        return;
      }

      if (!image->GetBackendData(LayerManager::LAYERS_D3D10)) {
        nsAutoPtr<TextureD3D10BackendData> dat = new TextureD3D10BackendData();
        dat->mTexture = SurfaceToTexture(device(), cairoImage->mSurface, cairoImage->mSize);

        if (dat->mTexture) {
          device()->CreateShaderResourceView(dat->mTexture, NULL, getter_AddRefs(dat->mSRView));
          image->SetBackendData(LayerManager::LAYERS_D3D10, dat.forget());
        }
      }

      hasAlpha = cairoImage->mSurface->GetContentType() == gfxASurface::CONTENT_COLOR_ALPHA;
    }

    TextureD3D10BackendData *data =
      static_cast<TextureD3D10BackendData*>(image->GetBackendData(LayerManager::LAYERS_D3D10));

    if (!data) {
      return;
    }

    nsRefPtr<ID3D10Device> dev;
    data->mTexture->GetDevice(getter_AddRefs(dev));
    if (dev != device()) {
      return;
    }
    
    if (hasAlpha) {
      if (mFilter == gfxPattern::FILTER_NEAREST) {
        technique = effect()->GetTechniqueByName("RenderRGBALayerPremulPoint");
      } else {
        technique = effect()->GetTechniqueByName("RenderRGBALayerPremul");
      }
    } else {
      if (mFilter == gfxPattern::FILTER_NEAREST) {
        technique = effect()->GetTechniqueByName("RenderRGBLayerPremulPoint");
      } else {
        technique = effect()->GetTechniqueByName("RenderRGBLayerPremul");
      }
    }

    effect()->GetVariableByName("tRGB")->AsShaderResource()->SetResource(data->mSRView);

    effect()->GetVariableByName("vLayerQuad")->AsVector()->SetFloatVector(
      ShaderConstantRectD3D10(
        (float)0,
        (float)0,
        (float)size.width,
        (float)size.height)
      );
  } else if (image->GetFormat() == Image::PLANAR_YCBCR) {
    PlanarYCbCrImage *yuvImage =
      static_cast<PlanarYCbCrImage*>(image);

    if (!yuvImage->mBufferSize) {
      return;
    }

    if (!yuvImage->GetBackendData(LayerManager::LAYERS_D3D10)) {
      AllocateTexturesYCbCr(yuvImage);
    }

    PlanarYCbCrD3D10BackendData *data =
      static_cast<PlanarYCbCrD3D10BackendData*>(yuvImage->GetBackendData(LayerManager::LAYERS_D3D10));

    if (!data) {
      return;
    }

    nsRefPtr<ID3D10Device> dev;
    data->mYTexture->GetDevice(getter_AddRefs(dev));
    if (dev != device()) {
      return;
    }

    // TODO: At some point we should try to deal with mFilter here; you don't
    // really want to use point filtering in the NEAREST case, since that
    // would also use point filtering for chroma upsampling, where the user
    // most likely only wants point filtering for the final RGB upsampling.

    technique = effect()->GetTechniqueByName("RenderYCbCrLayer");

    effect()->GetVariableByName("tY")->AsShaderResource()->SetResource(data->mYView);
    effect()->GetVariableByName("tCb")->AsShaderResource()->SetResource(data->mCbView);
    effect()->GetVariableByName("tCr")->AsShaderResource()->SetResource(data->mCrView);

    /*
     * Send 3d control data and metadata to NV3DVUtils
     */
    if (GetNv3DVUtils()) {
      Nv_Stereo_Mode mode;
      switch (yuvImage->mData.mStereoMode) {
      case STEREO_MODE_LEFT_RIGHT:
        mode = NV_STEREO_MODE_LEFT_RIGHT;
        break;
      case STEREO_MODE_RIGHT_LEFT:
        mode = NV_STEREO_MODE_RIGHT_LEFT;
        break;
      case STEREO_MODE_BOTTOM_TOP:
        mode = NV_STEREO_MODE_BOTTOM_TOP;
        break;
      case STEREO_MODE_TOP_BOTTOM:
        mode = NV_STEREO_MODE_TOP_BOTTOM;
        break;
      case STEREO_MODE_MONO:
        mode = NV_STEREO_MODE_MONO;
        break;
      default:
        // Guard against an unexpected enum value leaving 'mode' uninitialized.
        mode = NV_STEREO_MODE_MONO;
        break;
      }
      
      // Send control data even in mono case so driver knows to leave stereo mode.
      GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);

      if (yuvImage->mData.mStereoMode != STEREO_MODE_MONO) {
        // Dst resource is optional
        GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)yuvImage->mSize.width, 
                                           (unsigned int)yuvImage->mSize.height, (HANDLE)(data->mYTexture), (HANDLE)(NULL));
      }
    }

    effect()->GetVariableByName("vLayerQuad")->AsVector()->SetFloatVector(
      ShaderConstantRectD3D10(
        (float)0,
        (float)0,
        (float)size.width,
        (float)size.height)
      );

    effect()->GetVariableByName("vTextureCoords")->AsVector()->SetFloatVector(
      ShaderConstantRectD3D10(
        (float)yuvImage->mData.mPicX / yuvImage->mData.mYSize.width,
        (float)yuvImage->mData.mPicY / yuvImage->mData.mYSize.height,
        (float)yuvImage->mData.mPicSize.width / yuvImage->mData.mYSize.width,
        (float)yuvImage->mData.mPicSize.height / yuvImage->mData.mYSize.height)
       );
  }
  
  bool resetTexCoords = image->GetFormat() == Image::PLANAR_YCBCR;
  // Notify while the image pointer is still valid; after the container is
  // unlocked below, 'image' must no longer be dereferenced.
  GetContainer()->NotifyPaintedImage(image);
  image = nsnull;
  autoLock.Unlock();

  technique->GetPassByIndex(0)->Apply(0);
  device()->Draw(4, 0);

  if (resetTexCoords) {
    effect()->GetVariableByName("vTextureCoords")->AsVector()->
      SetFloatVector(ShaderConstantRectD3D10(0, 0, 1.0f, 1.0f));
  }

}
void BlockIterator::seek(
        int64_t seekTimeUs, bool isAudio,
        int64_t *actualFrameTimeUs) {
    Mutex::Autolock autoLock(mExtractor->mLock);

    *actualFrameTimeUs = -1ll;

    const int64_t seekTimeNs = seekTimeUs * 1000ll;

    mkvparser::Segment* const pSegment = mExtractor->mSegment;

    // Special case the 0 seek to avoid loading Cues when the application
    // extraneously seeks to 0 before playing.
    if (seekTimeNs <= 0) {
        ALOGV("Seek to beginning: %lld", seekTimeUs);
        mCluster = pSegment->GetFirst();
        mBlockEntryIndex = 0;
        do {
            advance_l();
        } while (!eos() && block()->GetTrackNumber() != mTrackNum);
        return;
    }

    ALOGV("Seeking to: %lld", seekTimeUs);

    // If the Cues have not been located then find them.
    const mkvparser::Cues* pCues = pSegment->GetCues();
    const mkvparser::SeekHead* pSH = pSegment->GetSeekHead();
    if (!pCues && pSH) {
        const size_t count = pSH->GetCount();
        const mkvparser::SeekHead::Entry* pEntry;
        ALOGV("No Cues yet");

        for (size_t index = 0; index < count; index++) {
            pEntry = pSH->GetEntry(index);

            if (pEntry->id == 0x0C53BB6B) { // Cues ID
                long len; long long pos;
                pSegment->ParseCues(pEntry->pos, pos, len);
                pCues = pSegment->GetCues();
                ALOGV("Cues found");
                break;
            }
        }

        if (!pCues) {
            ALOGE("No Cues in file");
            return;
        }
    }
    else if (!pSH) {
        ALOGE("No SeekHead");
        return;
    }

    const mkvparser::CuePoint* pCP;
    while (!pCues->DoneParsing()) {
        pCues->LoadCuePoint();
        pCP = pCues->GetLast();

        if (pCP->GetTime(pSegment) >= seekTimeNs) {
            ALOGV("Parsed past relevant Cue");
            break;
        }
    }

    // The Cue index is built around video keyframes
    mkvparser::Tracks const *pTracks = pSegment->GetTracks();
    const mkvparser::Track *pTrack = NULL;
    for (size_t index = 0; index < pTracks->GetTracksCount(); ++index) {
        pTrack = pTracks->GetTrackByIndex(index);
        if (pTrack && pTrack->GetType() == 1) { // VIDEO_TRACK
            ALOGV("Video track located at %d", index);
            break;
        }
    }

    // Always *search* based on the video track, but finalize based on mTrackNum
    const mkvparser::CuePoint::TrackPosition* pTP;
    if (pTrack && pTrack->GetType() == 1) {
        if (!pCues->Find(seekTimeNs, pTrack, pCP, pTP)) {
            ALOGE("Did not find cue-point for video track at %lld", seekTimeUs);
            return;
        }
    } else {
        ALOGE("Did not locate the video track for seeking");
        return;
    }

    mCluster = pSegment->FindOrPreloadCluster(pTP->m_pos);

    CHECK(mCluster);
    CHECK(!mCluster->EOS());

    // mBlockEntryIndex starts at 0 but m_block starts at 1
    CHECK_GT(pTP->m_block, 0);
    mBlockEntryIndex = pTP->m_block - 1;

    for (;;) {
        advance_l();

        if (eos()) break;

        if (isAudio || block()->IsKey()) {
            // Accept the first key frame
            *actualFrameTimeUs = (block()->GetTime(mCluster) + 500LL) / 1000LL;
            ALOGV("Requested seek point: %lld actual: %lld",
                  seekTimeUs, actualFrameTimeUs);
            break;
        }
    }
}
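The timestamp conversion in the loop above, isolated: Matroska block times come back in nanoseconds, and the + 500 rounds to the nearest microsecond instead of truncating.

#include <stdint.h>

static inline int64_t nsToUsRounded(int64_t ns) {
    return (ns + 500ll) / 1000ll; // 1499 ns -> 1 us, 1500 ns -> 2 us
}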
void BlockIterator::advance() {
    Mutex::Autolock autoLock(mExtractor->mLock);
    advance_l();
}
Example no. 14
0
size_t AudioPlayer::fillBuffer(void *data, size_t size) {
    ATRACE_CALL();
    if (mNumFramesPlayed == 0) {
        ALOGV("AudioCallback");
    }

    if (mReachedEOS) {
        return 0;
    }

    bool postSeekComplete = false;
    bool postEOS = false;
    int64_t postEOSDelayUs = 0;

    size_t size_done = 0;
    size_t size_remaining = size;
    while (size_remaining > 0) {
        MediaSource::ReadOptions options;
        bool refreshSeekTime = false;

        {
            Mutex::Autolock autoLock(mLock);

            if (mSeeking) {
                if (mIsFirstBuffer) {
                    if (mFirstBuffer != NULL) {
                        mFirstBuffer->release();
                        mFirstBuffer = NULL;
                    }
                    mIsFirstBuffer = false;
                }

                options.setSeekTo(mSeekTimeUs);
                refreshSeekTime = true;

                if (mInputBuffer != NULL) {
                    mInputBuffer->release();
                    mInputBuffer = NULL;
                }

                mSeeking = false;
                if (mObserver) {
                    postSeekComplete = true;
                }
            }
        }

        if (mInputBuffer == NULL) {
            status_t err;

            if (mIsFirstBuffer) {
                mInputBuffer = mFirstBuffer;
                mFirstBuffer = NULL;
                err = mFirstBufferResult;

                mIsFirstBuffer = false;
            } else {
                if (!mSourcePaused) {
                    err = mSource->read(&mInputBuffer, &options);
                    if (err == OK && mInputBuffer == NULL && mSourcePaused) {
                        ALOGV("mSourcePaused, return 0 from fillBuffer");
                        return 0;
                    }
                } else {
                    break;
                }
            }

            if (err == -EAGAIN) {
                if (mSourcePaused) {
                    break;
                } else {
                    continue;
                }
            }

            CHECK((err == OK && mInputBuffer != NULL)
                   || (err != OK && mInputBuffer == NULL));

            Mutex::Autolock autoLock(mLock);

            if (err != OK && err != INFO_FORMAT_CHANGED) {
                if (!mReachedEOS) {
                    if (useOffload()) {
                        // After seek there is a possible race condition if
                        // OffloadThread is observing state_stopping_1 before
                        // framesReady() > 0. Ensure sink stop is called
                        // after last buffer is released. This ensures the
                        // partial buffer is written to the driver before
                        // the stop is observed. The drawback is that
                        // there will be an unnecessary call to the parser
                        // after parser signalled EOS.

                        int64_t playPosition = 0;
                        playPosition = getOutputPlayPositionUs_l();
                        if ((size_done > 0) && (playPosition < mDurationUs)) {
                             ALOGW("send Partial buffer down\n");
                             ALOGW("skip calling stop till next fillBuffer\n");
                             break;
                        }

                        // no more buffers to push - stop() and wait for STREAM_END
                        // don't set mReachedEOS until stream end received
                        if (mAudioSink != NULL) {
                            mAudioSink->stop();
                        } else {
                            mAudioTrack->stop();
                        }
                    } else {
                        if (mObserver) {
                            // We don't want to post EOS right away but only
                            // after all frames have actually been played out.

                            // This is the number of frames submitted to the
                            // AudioTrack that have not been heard yet.
                            uint32_t numFramesPendingPlayout =
                                getNumFramesPendingPlayout();

                            // This is the number of frames we're going to
                            // submit to the AudioTrack by returning from this
                            // callback.
                            uint32_t numAdditionalFrames = size_done / mFrameSize;

                            numFramesPendingPlayout += numAdditionalFrames;

                            int64_t timeToCompletionUs =
                                (1000000ll * numFramesPendingPlayout) / mSampleRate;

                            ALOGV("total number of frames played: %lld (%lld us)",
                                    (mNumFramesPlayed + numAdditionalFrames),
                                    1000000ll * (mNumFramesPlayed + numAdditionalFrames)
                                        / mSampleRate);

                            ALOGV("%d frames left to play, %lld us (%.2f secs)",
                                 numFramesPendingPlayout,
                                 timeToCompletionUs, timeToCompletionUs / 1E6);

                            postEOS = true;
                            if (mAudioSink->needsTrailingPadding()) {
                                postEOSDelayUs = timeToCompletionUs + mLatencyUs;
                            } else {
                                postEOSDelayUs = 0;
                            }
                        }

                        mReachedEOS = true;
                    }
                }

                mFinalStatus = err;
                break;
            }

            if (mAudioSink != NULL) {
                mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
            } else {
                mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
            }

            if (mInputBuffer->range_length() != 0) {
                CHECK(mInputBuffer->meta_data()->findInt64(
                        kKeyTime, &mPositionTimeMediaUs));
            }

            // need to adjust the mStartPosUs for offload decoding since parser
            // might not be able to get the exact seek time requested.
            if (refreshSeekTime) {
                if (useOffload()) {
                    if (postSeekComplete) {
                        ALOGV("fillBuffer is going to post SEEK_COMPLETE");
                        mObserver->postAudioSeekComplete();
                        postSeekComplete = false;
                    }

                    mStartPosUs = mPositionTimeMediaUs;
                    ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6);
                }
                // clear seek time with mLock locked and once we have valid mPositionTimeMediaUs
                // and mPositionTimeRealUs
                // before clearing mSeekTimeUs check if a new seek request has been received while
                // we were reading from the source with mLock released.
                if (!mSeeking) {
                    mSeekTimeUs = 0;
                }
            }

            if (!useOffload()) {
                mPositionTimeRealUs =
                    ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
                        / mSampleRate;
                ALOGV("buffer->size() = %d, "
                     "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
                     mInputBuffer->range_length(),
                     mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
            }

        }

        if (mInputBuffer->range_length() == 0) {
            mInputBuffer->release();
            mInputBuffer = NULL;

            continue;
        }

        size_t copy = size_remaining;
        if (copy > mInputBuffer->range_length()) {
            copy = mInputBuffer->range_length();
        }

        memcpy((char *)data + size_done,
               (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
               copy);

        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
                                mInputBuffer->range_length() - copy);

        size_done += copy;
        size_remaining -= copy;
    }

    if (useOffload()) {
        // We must ask the hardware what it has played
        mPositionTimeRealUs = getOutputPlayPositionUs_l();
        ALOGV("mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
             mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
    }

    {
        Mutex::Autolock autoLock(mLock);
        mNumFramesPlayed += size_done / mFrameSize;

        if (mReachedEOS) {
            mPinnedTimeUs = mNumFramesPlayedSysTimeUs;
        } else {
            mNumFramesPlayedSysTimeUs = ALooper::GetNowUs();
            mPinnedTimeUs = -1ll;
        }
    }

    if (postEOS) {
        mObserver->postAudioEOS(postEOSDelayUs);
    }

    if (postSeekComplete) {
        mObserver->postAudioSeekComplete();
    }

    return size_done;
}
Example no. 15
0
bool OMXCodecProxy::IsWaitingResources()
{
  Mutex::Autolock autoLock(mLock);
  return mState == MediaResourceManagerClient::CLIENT_STATE_WAIT_FOR_RESOURCE;
}
size_t AudioPlayer::fillBuffer(void *data, size_t size) {
    if (mNumFramesPlayed == 0) {
        LOGV("AudioCallback");
    }

    if (mReachedEOS) {
        return 0;
    }

    size_t size_done = 0;
    size_t size_remaining = size;
    while (size_remaining > 0) {
        MediaSource::ReadOptions options;

        {
            Mutex::Autolock autoLock(mLock);

            if (mSeeking) {
                if (mIsFirstBuffer) {
                    if (mFirstBuffer != NULL) {
                        mFirstBuffer->release();
                        mFirstBuffer = NULL;
                    }
                    mIsFirstBuffer = false;
                }

                options.setSeekTo(mSeekTimeUs);

                if (mInputBuffer != NULL) {
                    mInputBuffer->release();
                    mInputBuffer = NULL;
                }

                mSeeking = false;
            }
        }

        if (mInputBuffer == NULL) {
            status_t err;

            if (mIsFirstBuffer) {
                mInputBuffer = mFirstBuffer;
                mFirstBuffer = NULL;
                err = mFirstBufferResult;

                mIsFirstBuffer = false;
            } else {
                err = mSource->read(&mInputBuffer, &options);
            }

            CHECK((err == OK && mInputBuffer != NULL)
                   || (err != OK && mInputBuffer == NULL));

            Mutex::Autolock autoLock(mLock);

            if (err != OK) {
                mReachedEOS = true;
                mFinalStatus = err;
                break;
            }

            CHECK(mInputBuffer->meta_data()->findInt64(
                        kKeyTime, &mPositionTimeMediaUs));

            mPositionTimeRealUs =
                ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
                    / mSampleRate;

            LOGV("buffer->size() = %d, "
                 "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
                 mInputBuffer->range_length(),
                 mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
        }

        if (mInputBuffer->range_length() == 0) {
            mInputBuffer->release();
            mInputBuffer = NULL;

            continue;
        }

        size_t copy = size_remaining;
        if (copy > mInputBuffer->range_length()) {
            copy = mInputBuffer->range_length();
        }

        memcpy((char *)data + size_done,
               (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
               copy);

        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
                                mInputBuffer->range_length() - copy);

        size_done += copy;
        size_remaining -= copy;
    }

    Mutex::Autolock autoLock(mLock);
    mNumFramesPlayed += size_done / mFrameSize;

    return size_done;
}
Example no. 17
0
MediaResourceManagerClient::State OMXCodecProxy::getState()
{
  Mutex::Autolock autoLock(mLock);
  return mState;
}
void NuCachedSource2::fetchInternal() {
    ALOGV("fetchInternal");

    bool reconnect = false;

    {
        Mutex::Autolock autoLock(mLock);
        CHECK(mFinalStatus == OK || mNumRetriesLeft > 0);

        if (mFinalStatus != OK) {
            --mNumRetriesLeft;

            reconnect = true;
        }
    }

    if (reconnect) {
        status_t err =
            mSource->reconnectAtOffset(mCacheOffset + mCache->totalSize());

        Mutex::Autolock autoLock(mLock);

        if (err == ERROR_UNSUPPORTED || err == -EPIPE) {
            // These are errors that are not likely to go away even if we
            // retry, i.e. the server doesn't support range requests or similar.
            mNumRetriesLeft = 0;
            return;
        } else if (err != OK) {
            ALOGI("The attempt to reconnect failed, %d retries remaining",
                 mNumRetriesLeft);

            return;
        }
    }

    PageCache::Page *page = mCache->acquirePage();

    ssize_t n = mSource->readAt(
            mCacheOffset + mCache->totalSize(), page->mData, kPageSize);

    Mutex::Autolock autoLock(mLock);

    if (n < 0) {
        mFinalStatus = n;
        if (n == ERROR_UNSUPPORTED || n == -EPIPE) {
            // These are errors that are not likely to go away even if we
            // retry, i.e. the server doesn't support range requests or similar.
            mNumRetriesLeft = 0;
        }

        ALOGE("source returned error %ld, %d retries left", n, mNumRetriesLeft);
        mCache->releasePage(page);
    } else if (n == 0) {
        ALOGI("ERROR_END_OF_STREAM");

        mNumRetriesLeft = 0;
        mFinalStatus = ERROR_END_OF_STREAM;

        mCache->releasePage(page);
    } else {
        if (mFinalStatus != OK) {
            ALOGI("retrying a previously failed read succeeded.");
        }
        mNumRetriesLeft = kMaxNumRetries;
        mFinalStatus = OK;

        page->mSize = n;
        mCache->appendPage(page);
    }
}
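A sketch of the error classification fetchInternal() applies, with stand-in constants: terminal failures (end of stream, servers without range-request support) zero the retry budget, transient ones consume a retry, and any success refills it.

#include <errno.h>

enum FetchVerdict { kGiveUp, kRetryLater, kSuccess };

static FetchVerdict classifyRead(long n, int* retriesLeft, int maxRetries)
{
    if (n > 0) {
        *retriesLeft = maxRetries; // a success refills the budget
        return kSuccess;
    }
    if (n == 0 || n == -EPIPE) {
        *retriesLeft = 0;          // EOS / permanent failure: stop retrying
        return kGiveUp;
    }
    return (--*retriesLeft > 0) ? kRetryLater : kGiveUp;
}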
Example no. 19
0
void
TiledContentHost::RenderTile(TileHost& aTile,
                             EffectChain& aEffectChain,
                             float aOpacity,
                             const gfx::Matrix4x4& aTransform,
                             const gfx::Filter& aFilter,
                             const gfx::Rect& aClipRect,
                             const nsIntRegion& aScreenRegion,
                             const IntPoint& aTextureOffset,
                             const IntSize& aTextureBounds,
                             const gfx::Rect& aVisibleRect)
{
  MOZ_ASSERT(!aTile.IsPlaceholderTile());

  AutoLockTextureHost autoLock(aTile.mTextureHost);
  AutoLockTextureHost autoLockOnWhite(aTile.mTextureHostOnWhite);
  if (autoLock.Failed() ||
      autoLockOnWhite.Failed()) {
    NS_WARNING("Failed to lock tile");
    return;
  }

  if (!aTile.mTextureHost->BindTextureSource(aTile.mTextureSource)) {
    return;
  }

  if (aTile.mTextureHostOnWhite && !aTile.mTextureHostOnWhite->BindTextureSource(aTile.mTextureSourceOnWhite)) {
    return;
  }

  RefPtr<TexturedEffect> effect =
    CreateTexturedEffect(aTile.mTextureSource,
                         aTile.mTextureSourceOnWhite,
                         aFilter,
                         true,
                         aTile.mTextureHost->GetRenderState());
  if (!effect) {
    return;
  }

  aEffectChain.mPrimaryEffect = effect;

  nsIntRegionRectIterator it(aScreenRegion);
  for (const IntRect* rect = it.Next(); rect != nullptr; rect = it.Next()) {
    Rect graphicsRect(rect->x, rect->y, rect->width, rect->height);
    Rect textureRect(rect->x - aTextureOffset.x, rect->y - aTextureOffset.y,
                     rect->width, rect->height);

    effect->mTextureCoords = Rect(textureRect.x / aTextureBounds.width,
                                  textureRect.y / aTextureBounds.height,
                                  textureRect.width / aTextureBounds.width,
                                  textureRect.height / aTextureBounds.height);
    mCompositor->DrawQuad(graphicsRect, aClipRect, aEffectChain, aOpacity, aTransform, aVisibleRect);
  }
  DiagnosticFlags flags = DiagnosticFlags::CONTENT | DiagnosticFlags::TILE;
  if (aTile.mTextureHostOnWhite) {
    flags |= DiagnosticFlags::COMPONENT_ALPHA;
  }
  mCompositor->DrawDiagnostics(flags,
                               aScreenRegion, aClipRect, aTransform, mFlashCounter);
  aTile.ReadUnlockPrevious();
}
size_t NuCachedSource2::cachedSize() {
    Mutex::Autolock autoLock(mLock);
    return mCacheOffset + mCache->totalSize();
}
int CedarAAudioPlayer::getSpace()
{
    Mutex::Autolock autoLock(mLock);
    return mAudioBufferSize;
}
size_t NuCachedSource2::approxDataRemaining(status_t *finalStatus) const {
    Mutex::Autolock autoLock(mLock);
    return approxDataRemaining_l(finalStatus);
}
bool AudioPlayer::isSeeking() {
    Mutex::Autolock autoLock(mLock);
    return mSeeking;
}
void NuCachedSource2::resumeFetchingIfNecessary() {
    Mutex::Autolock autoLock(mLock);

    restartPrefetcherIfNecessary_l(true /* ignore low water threshold */);
}
int64_t AudioPlayer::getRealTimeUs() {
    Mutex::Autolock autoLock(mLock);
    return getRealTimeUsLocked();
}
ssize_t CMMBDataSource::readAt(off64_t offset, void *data, size_t size) {
    Mutex::Autolock autoLock(mLock);

    // Note: 'size' is never checked against the amount copied below; the
    // caller must supply a buffer large enough for a whole frame plus header.
    if (NULL == data) {
        return 0;
    }

    if (0 == offset) {  // offset 0 selects the video stream
        uint32_t EndSize;
        static int32_t FrameCount = 0;
        static uint32_t preTimestamp = 0xFFFFFFFF;

        for (;;) {
            // If a frame is buffered and still contains a complete NALU,
            // emit the frame header followed by that NALU.
            if (NULL != pVideoFrame
                    && FindNaluBoundary(pVideoFrame->VideoFrameBuf, &FrameOffset,
                                        pVideoFrame->VideoFrameLen, &EndSize)) {
                memcpy(data, pVideoFrame, sizeof(TCmmbVideoFrame));
                memcpy((uint8_t *)data + sizeof(TCmmbVideoFrame),
                       (uint8_t *)pVideoFrame->VideoFrameBuf + FrameOffset, EndSize);
                FrameOffset += EndSize;
                return sizeof(TCmmbVideoFrame) + EndSize;
            }

            // Current frame exhausted: free it and fetch the next one.
            if (NULL != pVideoFrame) {
                if (F_CmmbFreeVideoFrame) {
                    F_CmmbFreeVideoFrame(pVideoFrame);
                } else {
                    LOGE("Error F_CmmbFreeVideoFrame = NULL");
                }
                pVideoFrame = NULL;
            }

            if (F_CmmbReadVideoFrame) {
                pVideoFrame = (TCmmbVideoFrame *)F_CmmbReadVideoFrame();
            } else {
                LOGE("Error F_CmmbReadVideoFrame = NULL");
            }

            if (NULL == pVideoFrame) {
                LOGE("CMMBDataSource::readAt videoframe is null");
                return 0;
            }

            // The * 10 / 225 scaling appears to convert 22.5 kHz timestamp
            // ticks to milliseconds; consecutive frames are expected to be
            // 30-50 ms apart.
            uint32_t timestampMs = pVideoFrame->timestamp * 10 / 225;
            if (0xFFFFFFFF != preTimestamp
                    && ((timestampMs - preTimestamp) > 50
                            || (timestampMs - preTimestamp) < 30)) {
                //LOGE("CMMBDataSource::readAt video timestamp maybe error, pretimestamp = %d, timestamp = %d", preTimestamp, timestampMs);
            }
            preTimestamp = timestampMs;

            FrameOffset = 0;
            FrameCount++;
        }
    } else if (1 == offset) {  // offset 1 selects the audio stream
        TCmmbAudioFrame *pAudioFrame = NULL;
        uint32_t framesize;
        static uint32_t frameNo = 0;
        static uint32_t preTimestamp_audio = 0xFFFFFFFF;

        if (F_CmmbReadAudioFrame) {
            pAudioFrame = (TCmmbAudioFrame *)F_CmmbReadAudioFrame();
        } else {
            LOGE("Error F_CmmbReadAudioFrame = NULL");
        }

        if (NULL == pAudioFrame) {
            LOGE("CMMBDataSource::readAt audioframe is null");
            return 0;
        }

        // Same timestamp sanity check as the video path.
        uint32_t timestampMs = pAudioFrame->timestamp * 10 / 225;
        if (0xFFFFFFFF != preTimestamp_audio
                && ((timestampMs - preTimestamp_audio) > 50
                        || (timestampMs - preTimestamp_audio) < 30)) {
            //LOGE("CMMBDataSource::readAt audio timestamp maybe error, pretimestamp = %d, timestamp = %d", preTimestamp_audio, timestampMs);
        }
        preTimestamp_audio = timestampMs;

        frameNo++;
        memcpy(data, pAudioFrame, sizeof(TCmmbAudioFrame));
        memcpy((uint8_t *)data + sizeof(TCmmbAudioFrame),
               pAudioFrame->AudioFrameBuf, pAudioFrame->AudioFrameLen);
        framesize = pAudioFrame->AudioFrameLen;

        if (F_CmmbFreeAudioFrame) {
            F_CmmbFreeAudioFrame(pAudioFrame);
        } else {
            LOGE("Error F_CmmbFreeAudioFrame = NULL");
        }

        return sizeof(TCmmbAudioFrame) + framesize;
    } else {
        LOGE("readAt offset param. error");
        return 0;
    }
}
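Note that this readAt() repurposes its arguments: offset acts as a stream selector rather than a byte position (0 requests the next video NALU, 1 the next audio frame), and size is never checked against the bytes copied out. A hypothetical caller therefore looks like this (the buffer size is an assumption; it must exceed the largest header-plus-frame the source can return):

uint8_t buf[64 * 1024];
ssize_t videoLen = source->readAt(0, buf, sizeof(buf));  // next video NALU, prefixed by TCmmbVideoFrame
ssize_t audioLen = source->readAt(1, buf, sizeof(buf));  // next audio frame, prefixed by TCmmbAudioFrame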
Example no. 27
0
void RemoteDisplayClient::waitUntilDone() {
    Mutex::Autolock autoLock(mLock);
    while (!mDone) {
        mCondition.wait(mLock);
    }
}
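waitUntilDone() is the consumer half of a classic monitor: the predicate mDone is re-tested in a loop because Condition::wait() may wake spuriously, and the mutex is dropped while waiting and re-acquired before each test. The producer half is not part of this snippet; a sketch of what it would look like (the method name done() is an assumption, not taken from the source):

void RemoteDisplayClient::done() {
    Mutex::Autolock autoLock(mLock);  // same mutex the waiter uses
    mDone = true;                     // flip the predicate under the lock
    mCondition.broadcast();           // wake every thread blocked in waitUntilDone()
}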
ssize_t NuPlayer::NuPlayerStreamListener::read(
        void *data, size_t size, sp<AMessage> *extra) {
    CHECK_GT(size, 0u);

    extra->clear();

    Mutex::Autolock autoLock(mLock);

    if (mEOS) {
        return 0;
    }

    if (mQueue.empty()) {
        mSendDataNotification = true;

        return -EWOULDBLOCK;
    }

    QueueEntry *entry = &*mQueue.begin();

    if (entry->mIsCommand) {
        switch (entry->mCommand) {
            case EOS:
            {
                mQueue.erase(mQueue.begin());
                entry = NULL;

                mEOS = true;
                return 0;
            }

            case DISCONTINUITY:
            {
                *extra = entry->mExtra;

                mQueue.erase(mQueue.begin());
                entry = NULL;

                return INFO_DISCONTINUITY;
            }

            default:
                TRESPASS();
                break;
        }
    }

    size_t copy = entry->mSize;
    if (copy > size) {
        copy = size;
    }

    if (entry->mIndex >= mBuffers.size()) {
        return ERROR_MALFORMED;
    }

    sp<IMemory> mem = mBuffers.editItemAt(entry->mIndex);
    if (mem == NULL || mem->size() < copy || mem->size() - copy < entry->mOffset) {
        return ERROR_MALFORMED;
    }

    memcpy(data,
           (const uint8_t *)mem->pointer()
            + entry->mOffset,
           copy);

    entry->mOffset += copy;
    entry->mSize -= copy;

    if (entry->mSize == 0) {
        mSource->onBufferAvailable(entry->mIndex);
        mQueue.erase(mQueue.begin());
        entry = NULL;
    }

    return copy;
}
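read() has four distinct outcomes: a positive byte count, 0 once the EOS command is consumed, -EWOULDBLOCK when the queue is momentarily empty (after arming mSendDataNotification so the source signals when data arrives), and INFO_DISCONTINUITY with details left in *extra. A sketch of a consumer loop handling all four; the fixed 188-byte buffer and the polling sleep are simplifications, and a real caller would react to the data notification instead:

uint8_t buffer[188];  // one MPEG-TS packet, for illustration
for (;;) {
    sp<AMessage> extra;
    ssize_t n = listener->read(buffer, sizeof(buffer), &extra);
    if (n == 0) {
        break;                      // EOS reached
    } else if (n == -EWOULDBLOCK) {
        usleep(10000);              // simplification: poll instead of waiting for the callback
    } else if (n == INFO_DISCONTINUITY) {
        // 'extra' describes the discontinuity (e.g. a format change)
    } else if (n < 0) {
        break;                      // ERROR_MALFORMED or another hard error
    } else {
        // consume n bytes from 'buffer'
    }
}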
VOID CParaNdisRX::ProcessRxRing(CCHAR nCurrCpuReceiveQueue)
{
    pRxNetDescriptor pBufferDescriptor;
    unsigned int nFullLength;

#ifndef PARANDIS_SUPPORT_RSS
    UNREFERENCED_PARAMETER(nCurrCpuReceiveQueue);
#endif

    TDPCSpinLocker autoLock(m_Lock);

    while (NULL != (pBufferDescriptor = (pRxNetDescriptor)m_VirtQueue.GetBuf(&nFullLength)))
    {
        RemoveEntryList(&pBufferDescriptor->listEntry);
        m_NetNofReceiveBuffers--;

        BOOLEAN packetAnalysisRC;

        packetAnalysisRC = ParaNdis_PerformPacketAnalysis(
#if PARANDIS_SUPPORT_RSS
            &m_Context->RSSParameters,
#endif

            &pBufferDescriptor->PacketInfo,
            pBufferDescriptor->PhysicalPages[PARANDIS_FIRST_RX_DATA_PAGE].Virtual,
            nFullLength - m_Context->nVirtioHeaderSize);

        if (!packetAnalysisRC)
        {
            pBufferDescriptor->Queue->ReuseReceiveBufferNoLock(pBufferDescriptor);
            m_Context->Statistics.ifInErrors++;
            m_Context->Statistics.ifInDiscards++;
            continue;
        }

#ifdef PARANDIS_SUPPORT_RSS
        CCHAR nTargetReceiveQueueNum;
        GROUP_AFFINITY TargetAffinity;
        PROCESSOR_NUMBER TargetProcessor;

        nTargetReceiveQueueNum = ParaNdis_GetScalingDataForPacket(
            m_Context,
            &pBufferDescriptor->PacketInfo,
            &TargetProcessor);

        if (nTargetReceiveQueueNum == PARANDIS_RECEIVE_UNCLASSIFIED_PACKET)
        {
            ParaNdis_ReceiveQueueAddBuffer(&m_UnclassifiedPacketsQueue, pBufferDescriptor);
        }
        else
        {
            ParaNdis_ReceiveQueueAddBuffer(&m_Context->ReceiveQueues[nTargetReceiveQueueNum], pBufferDescriptor);

            if (nTargetReceiveQueueNum != nCurrCpuReceiveQueue)
            {
                ParaNdis_ProcessorNumberToGroupAffinity(&TargetAffinity, &TargetProcessor);
                ParaNdis_QueueRSSDpc(m_Context, m_messageIndex, &TargetAffinity);
            }
        }
#else
        ParaNdis_ReceiveQueueAddBuffer(&m_UnclassifiedPacketsQueue, pBufferDescriptor);
#endif
    }
}
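TDPCSpinLocker plays the same role at raised IRQL that Mutex::Autolock plays in the user-mode snippets: acquired in the constructor, released in the destructor, so the continue inside the loop and the normal function exit both release the ring lock. A minimal sketch of such a guard over an NDIS spin lock (the class below is illustrative; the real TDPCSpinLocker may differ):

class SpinLockGuard {
public:
    explicit SpinLockGuard(NDIS_SPIN_LOCK &lock) : m_lock(lock) {
        NdisDprAcquireSpinLock(&m_lock);  // Dpr variant: assumes we already run at DISPATCH_LEVEL
    }
    ~SpinLockGuard() {
        NdisDprReleaseSpinLock(&m_lock);
    }
    SpinLockGuard(const SpinLockGuard &) = delete;
    SpinLockGuard &operator=(const SpinLockGuard &) = delete;

private:
    NDIS_SPIN_LOCK &m_lock;
};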
Example no. 30
0
MessagePool::~MessagePool() {
    AutoLock autoLock(lock);
    MAX_SIZE = 0;
    size = 0;
    pool = nullptr;  // assumes 'pool' is a smart pointer or owned elsewhere; a raw owning pointer would leak here
}
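Every example in this collection leans on the same scope-bound locking idiom, whatever the guard is called (Mutex::Autolock, TDPCSpinLocker, AutoLock): the constructor acquires the lock, the destructor releases it, so early returns and exceptions cannot leak it. The portable-C++ equivalent is std::lock_guard:

#include <mutex>

std::mutex gLock;
int gValue = 0;

int readValue() {
    std::lock_guard<std::mutex> autoLock(gLock);  // acquired here
    return gValue;                                // released when autoLock leaves scope
}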