sp<MediaSource> StagefrightRecorder::createAudioSource() {
    sp<AudioSource> audioSource =
        new AudioSource(
                mAudioSource,
                mSampleRate,
                mAudioChannels);

    status_t err = audioSource->initCheck();

    if (err != OK) {
        LOGE("audio source is not initialized");
        return NULL;
    }

    sp<MetaData> encMeta = new MetaData;
    const char *mime;
    switch (mAudioEncoder) {
        case AUDIO_ENCODER_AMR_NB:
        case AUDIO_ENCODER_DEFAULT:
            mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
            break;
        case AUDIO_ENCODER_AMR_WB:
            mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
            break;
        case AUDIO_ENCODER_AAC:
            mime = MEDIA_MIMETYPE_AUDIO_AAC;
            break;
        default:
            LOGE("Unknown audio encoder: %d", mAudioEncoder);
            return NULL;
    }
    encMeta->setCString(kKeyMIMEType, mime);

    int32_t maxInputSize;
    CHECK(audioSource->getFormat()->findInt32(
                kKeyMaxInputSize, &maxInputSize));

    encMeta->setInt32(kKeyMaxInputSize, maxInputSize);
    encMeta->setInt32(kKeyChannelCount, mAudioChannels);
    encMeta->setInt32(kKeySampleRate, mSampleRate);
    encMeta->setInt32(kKeyBitRate, mAudioBitRate);
    if (mAudioTimeScale > 0) {
        encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    sp<MediaSource> audioEncoder =
        OMXCodec::Create(client.interface(), encMeta,
                         true /* createEncoder */, audioSource);
    mAudioSourceNode = audioSource;

    return audioEncoder;
}
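For the AMR case, the encoder returned above is typically handed to a file writer by the recorder. Below is a minimal, hypothetical sketch of that hookup (it is not part of StagefrightRecorder); it mirrors the AMRWriter usage in a later example, and the output path and ten-second duration are arbitrary placeholders.

#include <unistd.h>

#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/MediaSource.h>

using namespace android;

// Sketch: drive an AMR audio encoder (as created above) through an AMRWriter.
static status_t recordAmrClip(const sp<MediaSource> &amrEncoder) {
    sp<AMRWriter> writer = new AMRWriter("/sdcard/clip.amr");
    writer->addSource(amrEncoder);

    status_t err = writer->start();
    if (err != OK) {
        return err;
    }

    sleep(10);              // capture roughly ten seconds of audio
    return writer->stop();  // stopping the writer also stops the encoder chain
}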
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    const int32_t kSampleRate = 22050;
    const int32_t kNumChannels = 2;
    sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);

#if 0
    sp<MediaPlayerBase::AudioSink> audioSink;
    AudioPlayer *player = new AudioPlayer(audioSink);
    player->setSource(audioSource);
    player->start();

    sleep(10);

    player->stop();
#endif

    sp<MetaData> encMeta = new MetaData;
    encMeta->setCString(kKeyMIMEType,
            1 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
    encMeta->setInt32(kKeySampleRate, kSampleRate);
    encMeta->setInt32(kKeyChannelCount, kNumChannels);
    encMeta->setInt32(kKeyMaxInputSize, 8192);

    sp<MediaSource> encoder =
        OMXCodec::Create(client.interface(), encMeta, true, audioSource);

    encoder->start();

    int32_t n = 0;
    status_t err;
    MediaBuffer *buffer;
    while ((err = encoder->read(&buffer)) == OK) {
        printf(".");
        fflush(stdout);

        buffer->release();
        buffer = NULL;

        if (++n == 100) {
            break;
        }
    }
    printf("$\n");

    encoder->stop();

    client.disconnect();

    return 0;
}
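As a variation on the dot-printing loop above, each returned MediaBuffer can be inspected before it is released. The sketch below is hypothetical; it reuses the kKeyTime lookup that appears in the RTP decoding example further down and assumes the same started encoder as above.

#include <stdio.h>

#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>

using namespace android;

// Sketch: drain an already-started encoder, logging each buffer's size and
// presentation timestamp instead of printing a dot.
static void drainEncoder(const sp<MediaSource> &encoder) {
    MediaBuffer *buffer;
    status_t err;
    while ((err = encoder->read(&buffer)) == OK) {
        int64_t timeUs = 0;
        buffer->meta_data()->findInt64(kKeyTime, &timeUs);

        printf("encoded %zu bytes at %.2f secs\n",
               buffer->range_length(), timeUs / 1E6);

        buffer->release();
        buffer = NULL;
    }
}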
Example #3
OMX_ERRORTYPE PREFIX(OMX_Init)(void)
{
    OMXClient client;
    if (client.connect() != OK)
        return OMX_ErrorUndefined;

    if (!ctx)
        ctx = new IOMXContext();
    ctx->iomx = client.interface();
    ctx->iomx->listNodes(&ctx->components);
    return OMX_ErrorNone;
}
status_t MediaCodecList::getCodecCapabilities(
        size_t index, const char *type,
        Vector<ProfileLevel> *profileLevels,
        Vector<uint32_t> *colorFormats,
        uint32_t *flags) const {
    profileLevels->clear();
    colorFormats->clear();

    if (index >= mCodecInfos.size()) {
        return -ERANGE;
    }

    const CodecInfo &info = mCodecInfos.itemAt(index);

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    CodecCapabilities caps;
    err = QueryCodec(
            client.interface(),
            info.mName.c_str(), type, info.mIsEncoder, &caps);

    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
        const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);

        ProfileLevel profileLevel;
        profileLevel.mProfile = src.mProfile;
        profileLevel.mLevel = src.mLevel;
        profileLevels->push(profileLevel);
    }

    for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
        colorFormats->push(caps.mColorFormats.itemAt(i));
    }

    *flags = caps.mFlags;

    return OK;
}
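A caller-side sketch of this API is shown below. It is hypothetical and assumes the getInstance() and findCodecByType() accessors that ship alongside this class in the same Android releases; the AVC MIME type is just an illustrative choice.

#include <stdio.h>

#include <media/stagefright/MediaCodecList.h>
#include <utils/Vector.h>

using namespace android;

// Sketch: print the capabilities the first advertised AVC decoder reports.
static void dumpAvcDecoderCaps() {
    const MediaCodecList *list = MediaCodecList::getInstance();

    ssize_t index = list->findCodecByType("video/avc", false /* encoder */);
    if (index < 0) {
        printf("no AVC decoder advertised\n");
        return;
    }

    Vector<MediaCodecList::ProfileLevel> profileLevels;
    Vector<uint32_t> colorFormats;
    uint32_t flags = 0;
    status_t err = list->getCodecCapabilities(
            index, "video/avc", &profileLevels, &colorFormats, &flags);
    if (err != OK) {
        printf("getCodecCapabilities failed: %d\n", err);
        return;
    }

    for (size_t i = 0; i < profileLevels.size(); ++i) {
        printf("profile %u level %u\n",
               profileLevels[i].mProfile, profileLevels[i].mLevel);
    }
    for (size_t i = 0; i < colorFormats.size(); ++i) {
        printf("color format 0x%x\n", colorFormats[i]);
    }
    printf("flags 0x%x\n", flags);
}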
Example #5
File: iomx.cpp  Project: 2BReality/xbmc
OMX_ERRORTYPE PREFIX(OMX_Init)(void)
{
  android_printf("OMX_Init\n");
    OMXClient client;
    if (client.connect() != OK)
        return OMX_ErrorUndefined;

    if (!ctx)
        ctx = new IOMXContext();
    ctx->iomx = client.interface();
    ctx->iomx->listNodes(&ctx->components);

    for (List<IOMX::ComponentInfo>::iterator it = ctx->components.begin(); it != ctx->components.end(); it++)
    {
      const IOMX::ComponentInfo &info = *it;
      const char* componentName = info.mName.string();
      for (List<String8>::const_iterator role_it = info.mRoles.begin(); role_it != info.mRoles.end(); role_it++)
      {
        const char* componentRole = (*role_it).string();
        android_printf("componentName:%s,componentRole:%s\n", componentName, componentRole);
      }
    }
    return OMX_ErrorNone;
}
bool OmxDecoder::Init() {
#ifdef PR_LOGGING
  if (!gOmxDecoderLog) {
    gOmxDecoderLog = PR_NewLogModule("OmxDecoder");
  }
#endif

  //register sniffers, if they are not registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource = new MediaStreamSource(mResource, mDecoder);
  if (dataSource->initCheck()) {
    NS_WARNING("Initializing DataSource for OMX decoder failed");
    return false;
  }

  mResource->SetReadMode(MediaCacheStream::MODE_METADATA);

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == nullptr) {
    NS_WARNING("Could not create MediaExtractor");
    return false;
  }

  ssize_t audioTrackIndex = -1;
  ssize_t videoTrackIndex = -1;
  const char *audioMime = nullptr;

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    int32_t bitRate;
    if (!meta->findInt32(kKeyBitRate, &bitRate))
      bitRate = 0;

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
      videoTrackIndex = i;
    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
      audioTrackIndex = i;
      audioMime = mime;
    }
  }

  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
    NS_WARNING("OMX decoder could not find video or audio tracks");
    return false;
  }

  mResource->SetReadMode(MediaCacheStream::MODE_PLAYBACK);

  int64_t totalDurationUs = 0;

  mNativeWindow = new GonkNativeWindow();
  mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow);

  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect.
  OMXClient client;
  status_t err = client.connect();
  NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
  sp<IOMX> omx = client.interface();

  sp<MediaSource> videoTrack;
  sp<MediaSource> videoSource;
  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
    // Experience with OMX codecs is that only the HW decoders are
    // worth bothering with, at least on the platforms where this code
    // is currently used, and for formats this code is currently used
    // for (h.264).  So if we don't get a hardware decoder, just give
    // up.
    int flags = kHardwareCodecsOnly;
    videoSource = OMXCodec::Create(omx,
                                   videoTrack->getFormat(),
                                   false, // decoder
                                   videoTrack,
                                   nullptr,
                                   flags,
                                   mNativeWindowClient);
    if (videoSource == nullptr) {
      NS_WARNING("Couldn't create OMX video source");
      return false;
    }

    // Check if this video is sized such that we're comfortable
    // possibly using an OMX decoder.
    int32_t maxWidth, maxHeight;
    char propValue[PROPERTY_VALUE_MAX];
    property_get("ro.moz.omx.hw.max_width", propValue, "-1");
    maxWidth = atoi(propValue);
    property_get("ro.moz.omx.hw.max_height", propValue, "-1");
    maxHeight = atoi(propValue);

    int32_t width = -1, height = -1;
    if (maxWidth > 0 && maxHeight > 0 &&
        !(videoSource->getFormat()->findInt32(kKeyWidth, &width) &&
          videoSource->getFormat()->findInt32(kKeyHeight, &height) &&
          width * height <= maxWidth * maxHeight)) {
      printf_stderr("Failed to get video size, or it was too large for HW decoder (<w=%d, h=%d> but <maxW=%d, maxH=%d>)",
                    width, height, maxWidth, maxHeight);
      return false;
    }

    if (videoSource->start() != OK) {
      NS_WARNING("Couldn't start OMX video source");
      return false;
    }

    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  sp<MediaSource> audioTrack;
  sp<MediaSource> audioSource;
  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
  {
    if (!strcasecmp(audioMime, "audio/raw")) {
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(omx,
                                     audioTrack->getFormat(),
                                     false, // decoder
                                     audioTrack);
    }
    if (audioSource == nullptr) {
      NS_WARNING("Couldn't create OMX audio source");
      return false;
    }
    if (audioSource->start() != OK) {
      NS_WARNING("Couldn't start OMX audio source");
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat()) {
    NS_WARNING("Couldn't set OMX video format");
    return false;
  }

  // To reliably get the channel and sample rate data we need to read from the
  // audio source until we get a INFO_FORMAT_CHANGE status
  if (mAudioSource.get()) {
    status_t err = mAudioSource->read(&mAudioBuffer);
    if (err != INFO_FORMAT_CHANGED) {
      if (err != OK) {
        NS_WARNING("Couldn't read audio buffer from OMX decoder");
        return false;
      }
      sp<MetaData> meta = mAudioSource->getFormat();
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        NS_WARNING("Couldn't get audio metadata from OMX decoder");
        return false;
      }
      mAudioMetadataRead = true;
    }
    else if (!SetAudioFormat()) {
      NS_WARNING("Couldn't set audio format");
      return false;
    }
  }

  return true;
}
M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;
    OMXClient client;

    ALOGV("VideoEditorVideoEncoder_getDSI begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Create the encoder source
    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = client.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
    // VIDEOEDITOR_FORCECODEC MUST be defined here
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
    encoder = OMXCodec::Create(client.interface(), metaData, true,
        encoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);

    /**
     * Send fake frames and retrieve the DSI
     */
    // Send a fake frame to the source
    metaData->findInt32(kKeyStride,     &stride);
    metaData->findInt32(kKeyHeight,     &height);
    metaData->findInt32(kKeySampleRate, &framerate);
    size = (size_t)(stride*height*3)/2;
    inputBuffer = new MediaBuffer(size);
    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
    nbBuffer = encoderSource->storeBuffer(inputBuffer);
    encoderSource->storeBuffer(NULL); // Signal EOS

    // Call read once to get the DSI
    result = encoder->start();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    result = encoder->read(&outputBuffer, NULL);
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);

    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
        // For H264, format the DSI
        result = buildAVCCodecSpecificData(
            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
            (size_t*)(&(pEncoderContext->mHeader.Size)),
            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
            outputBuffer->range_length(), encoder->getFormat().get());
        outputBuffer->release();
        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    } else {
        // For MPEG4, just copy the DSI
        pEncoderContext->mHeader.Size =
            (M4OSA_UInt32)outputBuffer->range_length();
        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
            pEncoderContext->mHeader.Size, "Encoder header");
        memcpy((void *)pEncoderContext->mHeader.pBuf,
            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
            pEncoderContext->mHeader.Size);
        outputBuffer->release();
    }

    result = encoder->stop();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

cleanUp:
    // Destroy the graph
    if ( encoder != NULL ) { encoder.clear(); }
    client.disconnect();
    if ( encoderSource != NULL ) { encoderSource.clear(); }
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_getDSI no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_getDSI end");
    return err;
}
Example #8
bool OmxDecoder::AllocateMediaResources()
{
  if ((mVideoTrack != nullptr) && (mVideoSource == nullptr)) {
    // OMXClient::connect() always returns OK and aborts fatally if
    // it can't connect.
    OMXClient client;
    DebugOnly<status_t> err = client.connect();
    NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
    sp<IOMX> omx = client.interface();

    mNativeWindow = new GonkNativeWindow();
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 17
    mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
#else
    mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow);
#endif

    // Experience with OMX codecs is that only the HW decoders are
    // worth bothering with, at least on the platforms where this code
    // is currently used, and for formats this code is currently used
    // for (h.264).  So if we don't get a hardware decoder, just give
    // up.
#ifdef MOZ_OMX_WEBM_DECODER
    int flags = 0;//fallback to omx sw decoder if there is no hw decoder
#else
    int flags = kHardwareCodecsOnly;
#endif//MOZ_OMX_WEBM_DECODER

    if (isInEmulator()) {
      // If we are in emulator, allow to fall back to software.
      flags = 0;
    }
    mVideoSource =
          OMXCodecProxy::Create(omx,
                                mVideoTrack->getFormat(),
                                false, // decoder
                                mVideoTrack,
                                nullptr,
                                flags,
                                mNativeWindowClient);
    if (mVideoSource == nullptr) {
      NS_WARNING("Couldn't create OMX video source");
      return false;
    } else {
      sp<OMXCodecProxy::EventListener> listener = this;
      mVideoSource->setEventListener(listener);
      mVideoSource->requestResource();
    }
  }

  if ((mAudioTrack != nullptr) && (mAudioSource == nullptr)) {
    // OMXClient::connect() always returns OK and aborts fatally if
    // it can't connect.
    OMXClient client;
    DebugOnly<status_t> err = client.connect();
    NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
    sp<IOMX> omx = client.interface();

    const char *audioMime = nullptr;
    sp<MetaData> meta = mAudioTrack->getFormat();
    if (!meta->findCString(kKeyMIMEType, &audioMime)) {
      return false;
    }
    if (!strcasecmp(audioMime, "audio/raw")) {
      mAudioSource = mAudioTrack;
    } else {
      // try to load hardware codec in mediaserver process.
      int flags = kHardwareCodecsOnly;
      mAudioSource = OMXCodec::Create(omx,
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
    }

    if (mAudioSource == nullptr) {
      // try to load software codec in this process.
      int flags = kSoftwareCodecsOnly;
      mAudioSource = OMXCodec::Create(GetOMX(),
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
      if (mAudioSource == nullptr) {
        NS_WARNING("Couldn't create OMX audio source");
        return false;
      }
    }
    if (mAudioSource->start() != OK) {
      NS_WARNING("Couldn't start OMX audio source");
      mAudioSource.clear();
      return false;
    }
  }
  return true;
}
Example #9
RefPtr<mozilla::MediaOmxCommonReader::MediaResourcePromise>
OmxDecoder::AllocateMediaResources()
{
  RefPtr<MediaResourcePromise> p = mMediaResourcePromise.Ensure(__func__);

  if ((mVideoTrack != nullptr) && (mVideoSource == nullptr)) {
    // OMXClient::connect() always returns OK and aborts fatally if
    // it can't connect.
    OMXClient client;
    DebugOnly<status_t> err = client.connect();
    NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
    sp<IOMX> omx = client.interface();

#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
    sp<IGraphicBufferProducer> producer;
    sp<IGonkGraphicBufferConsumer> consumer;
    GonkBufferQueue::createBufferQueue(&producer, &consumer);
    mNativeWindow = new GonkNativeWindow(consumer);
#else
    mNativeWindow = new GonkNativeWindow();
#endif

#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
    mNativeWindowClient = new Surface(producer);
#elif defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 17
    mNativeWindowClient = new Surface(mNativeWindow->getBufferQueue());
#else
    mNativeWindowClient = new SurfaceTextureClient(mNativeWindow);
#endif

    // Experience with OMX codecs is that only the HW decoders are
    // worth bothering with, at least on the platforms where this code
    // is currently used, and for formats this code is currently used
    // for (h.264).  So if we don't get a hardware decoder, just give
    // up.
#ifdef MOZ_OMX_WEBM_DECODER
    int flags = 0;//fallback to omx sw decoder if there is no hw decoder
#else
    int flags = kHardwareCodecsOnly;
#endif//MOZ_OMX_WEBM_DECODER

    if (isInEmulator()) {
      // If we are in emulator, allow to fall back to software.
      flags = 0;
    }
    mVideoSource =
          OMXCodecProxy::Create(omx,
                                mVideoTrack->getFormat(),
                                false, // decoder
                                mVideoTrack,
                                nullptr,
                                flags,
                                mNativeWindowClient);
    if (mVideoSource == nullptr) {
      NS_WARNING("Couldn't create OMX video source");
      mMediaResourcePromise.Reject(true, __func__);
      return p;
    } else {
      sp<OmxDecoder> self = this;
      mVideoCodecRequest.Begin(mVideoSource->requestResource()
        ->Then(OwnerThread(), __func__,
          [self] (bool) -> void {
            self->mVideoCodecRequest.Complete();
            self->mMediaResourcePromise.ResolveIfExists(true, __func__);
          }, [self] (bool) -> void {
            self->mVideoCodecRequest.Complete();
            self->mMediaResourcePromise.RejectIfExists(true, __func__);
          }));
    }
  }

  if ((mAudioTrack != nullptr) && (mAudioSource == nullptr)) {
    // OMXClient::connect() always returns OK and aborts fatally if
    // it can't connect.
    OMXClient client;
    DebugOnly<status_t> err = client.connect();
    NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
    sp<IOMX> omx = client.interface();

    const char *audioMime = nullptr;
    sp<MetaData> meta = mAudioTrack->getFormat();
    if (!meta->findCString(kKeyMIMEType, &audioMime)) {
      mMediaResourcePromise.Reject(true, __func__);
      return p;
    }
    if (!strcasecmp(audioMime, "audio/raw")) {
      mAudioSource = mAudioTrack;
    } else {
      // try to load hardware codec in mediaserver process.
      int flags = kHardwareCodecsOnly;
      mAudioSource = OMXCodec::Create(omx,
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
    }

    if (mAudioSource == nullptr) {
      // try to load software codec in this process.
      int flags = kSoftwareCodecsOnly;
      mAudioSource = OMXCodec::Create(GetOMX(),
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
      if (mAudioSource == nullptr) {
        NS_WARNING("Couldn't create OMX audio source");
        mMediaResourcePromise.Reject(true, __func__);
        return p;
      }
    }
    if (mAudioSource->start() != OK) {
      NS_WARNING("Couldn't start OMX audio source");
      mAudioSource.clear();
      mMediaResourcePromise.Reject(true, __func__);
      return p;
    }
  }
  if (!mVideoSource.get()) {
    // No resource allocation wait.
    mMediaResourcePromise.Resolve(true, __func__);
  }
  return p;
}
Example #10
int main(int argc, char **argv) {

    // Default values for the program if not overwritten
    int frameRateFps = 30;
    int width = 176;
    int height = 144;
    int bitRateBps = 300000;
    int iFramesIntervalSeconds = 1;
    int colorFormat = OMX_COLOR_FormatYUV420Planar;
    int nFrames = 300;
    int level = -1;        // Encoder specific default
    int profile = -1;      // Encoder specific default
    int codec = 0;
    const char *fileName = "/sdcard/output.mp4";

    android::ProcessState::self()->startThreadPool();
    int res;
    while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
        switch (res) {
            case 'b':
            {
                bitRateBps = atoi(optarg);
                break;
            }

            case 'c':
            {
                colorFormat = translateColorToOmxEnumValue(atoi(optarg));
                if (colorFormat == -1) {
                    usage(argv[0]);
                }
                break;
            }

            case 'f':
            {
                frameRateFps = atoi(optarg);
                break;
            }

            case 'i':
            {
                iFramesIntervalSeconds = atoi(optarg);
                break;
            }

            case 'n':
            {
                nFrames = atoi(optarg);
                break;
            }

            case 'w':
            {
                width = atoi(optarg);
                break;
            }

            case 't':
            {
                height = atoi(optarg);
                break;
            }

            case 'l':
            {
                level = atoi(optarg);
                break;
            }

            case 'p':
            {
                profile = atoi(optarg);
                break;
            }

            case 'v':
            {
                codec = atoi(optarg);
                if (codec < 0 || codec > 2) {
                    usage(argv[0]);
                }
                break;
            }

            case 'h':
            default:
            {
                usage(argv[0]);
                break;
            }
        }
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    status_t err = OK;
    sp<MediaSource> source =
        new DummySource(width, height, nFrames, frameRateFps, colorFormat);

    sp<MetaData> enc_meta = new MetaData;
    switch (codec) {
        case 1:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
            break;
        case 2:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
            break;
        default:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
            break;
    }
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    enc_meta->setInt32(kKeyFrameRate, frameRateFps);
    enc_meta->setInt32(kKeyBitRate, bitRateBps);
    enc_meta->setInt32(kKeyStride, width);
    enc_meta->setInt32(kKeySliceHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);
    if (level != -1) {
        enc_meta->setInt32(kKeyVideoLevel, level);
    }
    if (profile != -1) {
        enc_meta->setInt32(kKeyVideoProfile, profile);
    }

    sp<MediaSource> encoder =
        OMXCodec::Create(
                client.interface(), enc_meta, true /* createEncoder */, source);

    sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
    writer->addSource(encoder);
    int64_t start = systemTime();
    CHECK_EQ(OK, writer->start());
    while (!writer->reachedEOS()) {
    }
    err = writer->stop();
    int64_t end = systemTime();

    fprintf(stderr, "$\n");
    client.disconnect();

    if (err != OK && err != ERROR_END_OF_STREAM) {
        fprintf(stderr, "record failed: %d\n", err);
        return 1;
    }
    fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
    fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
    return 0;
}
status_t MediaCodecList::getCodecCapabilities(
        size_t index, const char *type,
        Vector<ProfileLevel> *profileLevels,
        Vector<uint32_t> *colorFormats,
        uint32_t *flags) const {
    profileLevels->clear();
    colorFormats->clear();

#ifndef ANDROID_DEFAULT_CODE
	ALOGI("[%s][ index=%d, mCodecInfos.size()=%d] ",__FUNCTION__,index,mCodecInfos.size() );
#endif	

    if (index >= mCodecInfos.size()) {
        return -ERANGE;
    }

    const CodecInfo &info = mCodecInfos.itemAt(index);

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
#ifndef ANDROID_DEFAULT_CODE
	ALOGI("[%s][ err1=%d  ] ",__FUNCTION__,err );
#endif			
        return err;
    }

#ifndef ANDROID_DEFAULT_CODE
	ALOGI("[%s][ connect OK ] ",__FUNCTION__ );
#endif

    CodecCapabilities caps;
    err = QueryCodec(
            client.interface(),
            info.mName.c_str(), type, info.mIsEncoder, &caps);

    if (err != OK) {
#ifndef ANDROID_DEFAULT_CODE
    ALOGI("[%s][ err2=%d ] ",__FUNCTION__,err );
#endif
        return err;
    }

#ifndef ANDROID_DEFAULT_CODE
    ALOGI("[%s][ QueryCodec OK ] ",__FUNCTION__ );
    int memTotalBytes = sysconf(_SC_PHYS_PAGES) * PAGE_SIZE;
    ALOGD("native_get_videoeditor_profile: mIsEncoder %d, memTotalBytes %d bytes", info.mIsEncoder, memTotalBytes);
#endif //ANDROID_DEFAULT_CODE

    for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
        const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);

        ProfileLevel profileLevel;
        profileLevel.mProfile = src.mProfile;
        profileLevel.mLevel = src.mLevel;
#ifndef ANDROID_DEFAULT_CODE
        ALOGD("mProfile %d mLevel %d", src.mProfile, src.mLevel);
        //for CTS case "EncodeVirtualDisplayWithCompositionTest "
        // Limit max recording resolution to 1280x720, if phone's ram <= 512MB
        if (memTotalBytes <= (512*1024*1024)) {
            if( (1==info.mIsEncoder)&& (OMX_VIDEO_AVCLevel4<=src.mLevel) )
            {
                ALOGD("skip once, memory may no be enough during large size video recording", src.mProfile, src.mLevel);
                continue;
            }
        }
#endif //ANDROID_DEFAULT_CODE
        profileLevels->push(profileLevel);
    }

    for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
#ifndef ANDROID_DEFAULT_CODE
        // For CTS case "com.android.cts.videoperf.VideoEncoderDecoderTest":
        // push an additional YUV420 format when a decoder reports one of the
        // MTK vendor color formats (block, YV12 or FCM).
        if ((0 == info.mIsEncoder) &&
                (OMX_COLOR_FormatVendorMTKYUV == caps.mColorFormats.itemAt(i) ||
                 OMX_MTK_COLOR_FormatYV12 == caps.mColorFormats.itemAt(i) ||
                 OMX_COLOR_FormatVendorMTKYUV_FCM == caps.mColorFormats.itemAt(i))) {
            colorFormats->push(OMX_COLOR_FormatYUV420Planar);
            ALOGI("itemAt(i) %x, isEncoder %d ", caps.mColorFormats.itemAt(i), info.mIsEncoder);
        }
#endif //ANDROID_DEFAULT_CODE
        colorFormats->push(caps.mColorFormats.itemAt(i));
    }

    *flags = caps.mFlags;
#ifndef ANDROID_DEFAULT_CODE
    ALOGI("[%s][ OK ] ",__FUNCTION__  );
#endif
    return OK;
}
Example #12
bool OmxDecoder::AllocateMediaResources()
{
  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect.
  OMXClient client;
  DebugOnly<status_t> err = client.connect();
  NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
  sp<IOMX> omx = client.interface();

  if ((mVideoTrack != nullptr) && (mVideoSource == nullptr)) {
    mNativeWindow = new GonkNativeWindow();
    mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow);

    // Experience with OMX codecs is that only the HW decoders are
    // worth bothering with, at least on the platforms where this code
    // is currently used, and for formats this code is currently used
    // for (h.264).  So if we don't get a hardware decoder, just give
    // up.
    int flags = kHardwareCodecsOnly;

    char propQemu[PROPERTY_VALUE_MAX];
    property_get("ro.kernel.qemu", propQemu, "");
    if (!strncmp(propQemu, "1", 1)) {
      // If we are in emulator, allow to fall back to software.
      flags = 0;
    }
    mVideoSource =
          OMXCodecProxy::Create(omx,
                                mVideoTrack->getFormat(),
                                false, // decoder
                                mVideoTrack,
                                nullptr,
                                flags,
                                mNativeWindowClient);
    if (mVideoSource == nullptr) {
      NS_WARNING("Couldn't create OMX video source");
      return false;
    } else {
      sp<OMXCodecProxy::EventListener> listener = this;
      mVideoSource->setEventListener(listener);
      mVideoSource->requestResource();
    }
  }

  if ((mAudioTrack != nullptr) && (mAudioSource == nullptr)) {
    const char *audioMime = nullptr;
    sp<MetaData> meta = mAudioTrack->getFormat();
    if (!meta->findCString(kKeyMIMEType, &audioMime)) {
      return false;
    }
    if (!strcasecmp(audioMime, "audio/raw")) {
      mAudioSource = mAudioTrack;
    } else {
      // try to load hardware codec in mediaserver process.
      int flags = kHardwareCodecsOnly;
      mAudioSource = OMXCodec::Create(omx,
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
    }

    if (mAudioSource == nullptr) {
      // try to load software codec in this process.
      int flags = kSoftwareCodecsOnly;
      mAudioSource = OMXCodec::Create(GetOMX(),
                                     mAudioTrack->getFormat(),
                                     false, // decoder
                                     mAudioTrack,
                                     nullptr,
                                     flags);
      if (mAudioSource == nullptr) {
        NS_WARNING("Couldn't create OMX audio source");
        return false;
      }
    }
    if (mAudioSource->start() != OK) {
      NS_WARNING("Couldn't start OMX audio source");
      return false;
    }
  }
  return true;
}
Example #13
void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
    // get href_base
    char *href_base_end = strrchr(codecs_xml, '/');
    if (href_base_end != NULL) {
        mHrefBase = AString(codecs_xml, href_base_end - codecs_xml + 1);
    }

    mInitCheck = OK; // keeping this here for safety
    mCurrentSection = SECTION_TOPLEVEL;
    mDepth = 0;

    OMXClient client;
    mInitCheck = client.connect();
    if (mInitCheck != OK) {
        return;
    }
    mOMX = client.interface();
    parseXMLFile(codecs_xml);
    mOMX.clear();

    if (mInitCheck != OK) {
        mCodecInfos.clear();
        return;
    }

    for (size_t i = mCodecInfos.size(); i-- > 0;) {
        const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();

        if (info.mCaps.size() == 0) {
            // No types supported by this component???
            ALOGW("Component %s does not support any type of media?",
                  info.mName.c_str());

            mCodecInfos.removeAt(i);
#if LOG_NDEBUG == 0
        } else {
            for (size_t type_ix = 0; type_ix < info.mCaps.size(); ++type_ix) {
                AString mime = info.mCaps.keyAt(type_ix);
                const sp<MediaCodecInfo::Capabilities> &caps = info.mCaps.valueAt(type_ix);

                ALOGV("%s codec info for %s: %s", info.mName.c_str(), mime.c_str(),
                        caps->getDetails()->debugString().c_str());
                ALOGV("    flags=%d", caps->getFlags());
                {
                    Vector<uint32_t> colorFormats;
                    caps->getSupportedColorFormats(&colorFormats);
                    AString nice;
                    for (size_t ix = 0; ix < colorFormats.size(); ix++) {
                        if (ix > 0) {
                            nice.append(", ");
                        }
                        nice.append(colorFormats.itemAt(ix));
                    }
                    ALOGV("    colors=[%s]", nice.c_str());
                }
                {
                    Vector<MediaCodecInfo::ProfileLevel> profileLevels;
                    caps->getSupportedProfileLevels(&profileLevels);
                    AString nice;
                    for (size_t ix = 0; ix < profileLevels.size(); ix++) {
                        if (ix > 0) {
                            nice.append(", ");
                        }
                        const MediaCodecInfo::ProfileLevel &pl =
                            profileLevels.itemAt(ix);
                        nice.append(pl.mProfile);
                        nice.append("/");
                        nice.append(pl.mLevel);
                    }
                    ALOGV("    levels=[%s]", nice.c_str());
                }
            }
#endif
        }
    }
	// for CTS 
#ifdef MTK_AOSP_ENHANCEMENT
#ifndef MTK_WMA_PLAYBACK_SUPPORT
		const char* wma_name = "OMX.MTK.AUDIO.DECODER.WMA";
		deleteByType(wma_name);
#endif
#ifndef MTK_SWIP_WMAPRO
		const char* wmapro_name = "OMX.MTK.AUDIO.DECODER.WMAPRO";
		deleteByType(wmapro_name);
#endif
#ifndef MTK_AUDIO_RAW_SUPPORT
		const char* pcm_name = "OMX.MTK.AUDIO.DECODER.RAW";
		deleteByType(pcm_name);
#endif
#ifndef MTK_AUDIO_DDPLUS_SUPPORT
		const char* DDPLUS_name1 = "OMX.dolby.ac3.decoder";
		deleteByType(DDPLUS_name1);
		const char* DDPLUS_name2 = "OMX.dolby.ec3.decoder";
		deleteByType(DDPLUS_name2);
#endif
#ifndef MTK_AUDIO_APE_SUPPORT
		const char* ape_name = "OMX.MTK.AUDIO.DECODER.APE";
		deleteByType(ape_name);
#endif
#endif

#if 0
    for (size_t i = 0; i < mCodecInfos.size(); ++i) {
        const CodecInfo &info = mCodecInfos.itemAt(i);

        AString line = info.mName;
        line.append(" supports ");
        for (size_t j = 0; j < mTypes.size(); ++j) {
            uint32_t value = mTypes.valueAt(j);

            if (info.mTypes & (1ul << value)) {
                line.append(mTypes.keyAt(j));
                line.append(" ");
            }
        }

        ALOGI("%s", line.c_str());
    }
#endif
}
Example #14
int main() {
    // We only have an AMR-WB encoder on sholes...
    static bool outputWBAMR = false;
    static const int32_t kSampleRate = outputWBAMR ? 16000 : 8000;
    static const int32_t kNumChannels = 1;

    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

#if 0
    sp<MediaSource> source = new SineSource(kSampleRate, kNumChannels);
#else
    sp<MediaSource> source = new AudioSource(
            AUDIO_SOURCE_DEFAULT,
            kSampleRate,
            audio_channel_in_mask_from_count(kNumChannels));
#endif

    sp<MetaData> meta = new MetaData;

    meta->setCString(
            kKeyMIMEType,
            outputWBAMR ? MEDIA_MIMETYPE_AUDIO_AMR_WB
                        : MEDIA_MIMETYPE_AUDIO_AMR_NB);

    meta->setInt32(kKeyChannelCount, kNumChannels);
    meta->setInt32(kKeySampleRate, kSampleRate);

    int32_t maxInputSize;
    if (source->getFormat()->findInt32(kKeyMaxInputSize, &maxInputSize)) {
        meta->setInt32(kKeyMaxInputSize, maxInputSize);
    }

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(),
            meta, true /* createEncoder */,
            source);

#if 1
    sp<AMRWriter> writer = new AMRWriter("/sdcard/out.amr");
    writer->addSource(encoder);
    writer->start();
    sleep(10);
    writer->stop();
#else
    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(),
            meta, false /* createEncoder */,
            encoder);

#if 0
    AudioPlayer *player = new AudioPlayer(NULL);
    player->setSource(decoder);

    player->start();

    sleep(10);

    player->stop();

    delete player;
    player = NULL;
#elif 0
    CHECK_EQ(decoder->start(), (status_t)OK);

    MediaBuffer *buffer;
    while (decoder->read(&buffer) == OK) {
        // do something with buffer

        putchar('.');
        fflush(stdout);

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), (status_t)OK);
#endif
#endif

    return 0;
}
//--------------------------------------------------
// Event handlers
void AacBqToPcmCbRenderer::onPrepare() {
    SL_LOGD("AacBqToPcmCbRenderer::onPrepare()");
    Mutex::Autolock _l(mBufferSourceLock);

    // Initialize the PCM format info with the known parameters before the start of the decode
    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_BITSPERSAMPLE] = SL_PCMSAMPLEFORMAT_FIXED_16;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CONTAINERSIZE] = 16;
        //FIXME not true on all platforms
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_ENDIANNESS] = SL_BYTEORDER_LITTLEENDIAN;
        //    initialization with the default values: they will be replaced by the actual values
        //      once the decoder has figured them out
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = UNKNOWN_NUMCHANNELS;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = UNKNOWN_SAMPLERATE;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = UNKNOWN_CHANNELMASK;
    }

    sp<MediaExtractor> extractor = new AacAdtsExtractor(mBqSource);

    // only decoding a single track of data
    const size_t kTrackToDecode = 0;

    sp<MediaSource> source = extractor->getTrack(kTrackToDecode);
    if (source == 0) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare: error getting source from extractor");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }
    sp<MetaData> meta = extractor->getTrackMetaData(kTrackToDecode);

    // the audio content is not raw PCM, so we need a decoder
    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

    source = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */,
            source);

    if (source == NULL) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare: Could not instantiate decoder.");
        notifyPrepared(ERROR_UNSUPPORTED);
        return;
    }

    meta = source->getFormat();

    SL_LOGD("AacBqToPcmCbRenderer::onPrepare() after instantiating decoder");

    if (source->start() != OK) {
        SL_LOGE("AacBqToPcmCbRenderer::onPrepare() Failed to start source/decoder.");
        notifyPrepared(MEDIA_ERROR_BASE);
        return;
    }

    //---------------------------------
    int32_t channelCount;
    CHECK(meta->findInt32(kKeyChannelCount, &channelCount));
    int32_t sr;
    CHECK(meta->findInt32(kKeySampleRate, &sr));
    // FIXME similar to AudioSfDecoder::onPrepare()

    // already "good to go" (compare to AudioSfDecoder::onPrepare)
    mCacheStatus = kStatusHigh;
    mCacheFill = 1000;
    notifyStatus();
    notifyCacheFill();

    {
        android::Mutex::Autolock autoLock(mPcmFormatLock);
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_SAMPLERATE] = sr;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_NUMCHANNELS] = channelCount;
        mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                channelCountToMask(channelCount);
    }
    SL_LOGV("AacBqToPcmCbRenderer::onPrepare() channel count=%d SR=%d",
            channelCount, sr);

    //---------------------------------
    // The data source, and audio source (a decoder) are ready to be used
    mDataSource = mBqSource;
    mAudioSource = source;
    mAudioSourceStarted = true;

    //-------------------------------------
    // signal successful completion of prepare
    mStateFlags |= kFlagPrepared;

    // skipping past AudioToCbRenderer and AudioSfDecoder
    GenericPlayer::onPrepare();

    SL_LOGD("AacBqToPcmCbRenderer::onPrepare() done, mStateFlags=0x%x", mStateFlags);
}
bool OmxDecoder::Init() {
  //register sniffers, if they are not registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource = new MediaStreamSource(mPluginHost, mDecoder);
  if (dataSource->initCheck()) {
    return false;
  }

  mPluginHost->SetMetaDataReadMode(mDecoder);

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == NULL) {
    return false;
  }

  ssize_t audioTrackIndex = -1;
  ssize_t videoTrackIndex = -1;
  const char *audioMime = NULL;
  const char *videoMime = NULL;

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
      videoTrackIndex = i;
      videoMime = mime;
    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
      audioTrackIndex = i;
      audioMime = mime;
    }
  }

  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
    return false;
  }

  mPluginHost->SetPlaybackReadMode(mDecoder);

  int64_t totalDurationUs = 0;

#ifdef MOZ_WIDGET_GONK
  sp<IOMX> omx = GetOMX();
#else
  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect. We may need to implement the connect functionality
  // ourselves if this proves to be an issue.
  if (mClient.connect() != OK) {
    LOG("OMXClient failed to connect");
  }
  sp<IOMX> omx = mClient.interface();
#endif

  sp<MediaSource> videoTrack;
  sp<MediaSource> videoSource;
  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != NULL) {
    uint32_t flags = GetVideoCreationFlags(mPluginHost);
    videoSource = OMXCodec::Create(omx,
                                   videoTrack->getFormat(),
                                   false, // decoder
                                   videoTrack,
                                   NULL,
                                   flags);
    if (videoSource == NULL) {
      LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
      return false;
    }

    status_t status = videoSource->start();
    if (status != OK) {
      LOG("videoSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("video duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  sp<MediaSource> audioTrack;
  sp<MediaSource> audioSource;
  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != NULL)
  {
    if (!strcasecmp(audioMime, "audio/raw")) {
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(omx,
                                     audioTrack->getFormat(),
                                     false, // decoder
                                     audioTrack);
    }

    if (audioSource == NULL) {
      LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
      return false;
    }

    status_t status = audioSource->start();
    if (status != OK) {
      LOG("audioSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("audio duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat())
    return false;

  // To reliably get the channel and sample rate data we need to read from the
  // audio source until we get a INFO_FORMAT_CHANGE status
  if (mAudioSource.get()) {
    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
      sp<MetaData> meta = mAudioSource->getFormat();
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        return false;
      }
      mAudioMetadataRead = true;

      if (mAudioChannels < 0) {
        LOG("audio channel count %d must be nonnegative", mAudioChannels);
        return false;
      }

      if (mAudioSampleRate < 0) {
        LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
        return false;
      }
    }
    else if (!SetAudioFormat()) {
        return false;
    }
  }
  return true;
}
status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
    source->clear();

    status_t err = setupCameraSource();
    if (err != OK) return err;

    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
    CHECK(cameraSource != NULL);

    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
    enc_meta->setInt32(kKeySampleRate, mFrameRate);

    switch (mVideoEncoder) {
        case VIDEO_ENCODER_H263:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
            break;

        case VIDEO_ENCODER_MPEG_4_SP:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
            break;

        case VIDEO_ENCODER_H264:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
            break;

        default:
            CHECK(!"Should not be here, unsupported video encoding.");
            break;
    }

    sp<MetaData> meta = cameraSource->getFormat();

    int32_t width, height, stride, sliceHeight, colorFormat;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));
    CHECK(meta->findInt32(kKeyStride, &stride));
    CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
#if defined (TARGET_OMAP4)
    int32_t paddedWidth, paddedHeight;
    CHECK(meta->findInt32(kKeyPaddedWidth, &paddedWidth));
    CHECK(meta->findInt32(kKeyPaddedHeight, &paddedHeight));
#endif

    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
    enc_meta->setInt32(kKeyStride, stride);
    enc_meta->setInt32(kKeySliceHeight, sliceHeight);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);
#if defined (TARGET_OMAP4)
    enc_meta->setInt32(kKeyPaddedWidth, paddedWidth);
    enc_meta->setInt32(kKeyPaddedHeight, paddedHeight);
#endif

    if (mVideoTimeScale > 0) {
        enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
    }
    if (mVideoEncoderProfile != -1) {
        enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
    }
    if (mVideoEncoderLevel != -1) {
        enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta,
            true /* createEncoder */, cameraSource);
    if (encoder == NULL) {
        return UNKNOWN_ERROR;
    }

    *source = encoder;

    return OK;
}
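The encoder returned through *source is then typically handed to the recorder's file writer. A hypothetical sketch of that hookup, mirroring the MPEG4Writer polling loop in the earlier command-line example, follows; the output path is a placeholder.

#include <unistd.h>

#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaSource.h>

using namespace android;

// Sketch: mux an already-configured video encoder into an MP4 file and wait
// for the writer to reach end of stream.
static status_t writeEncodedVideo(const sp<MediaSource> &videoEncoder) {
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/recorded.mp4");
    writer->addSource(videoEncoder);

    status_t err = writer->start();
    if (err != OK) {
        return err;
    }

    while (!writer->reachedEOS()) {
        usleep(100000);  // poll, as the earlier example does
    }
    return writer->stop();
}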
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

#if 1
    if (argc != 2) {
        fprintf(stderr, "usage: %s filename\n", argv[0]);
        return 1;
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

#if 1
    sp<MediaSource> source = createSource(argv[1]);

    if (source == NULL) {
        fprintf(stderr, "Unable to find a suitable video track.\n");
        return 1;
    }

    sp<MetaData> meta = source->getFormat();

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */, source);

    int width, height;
    bool success = meta->findInt32(kKeyWidth, &width);
    success = success && meta->findInt32(kKeyHeight, &height);
    CHECK(success);
#else
    int width = 800;
    int height = 480;
    sp<MediaSource> decoder = new DummySource(width, height);
#endif

    sp<MetaData> enc_meta = new MetaData;
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);

    sp<MediaSource> encoder =
        OMXCodec::Create(
                client.interface(), enc_meta, true /* createEncoder */, decoder);

#if 1
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
    writer->addSource(encoder);
    writer->start();
    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
#else
    encoder->start();

    MediaBuffer *buffer;
    while (encoder->read(&buffer) == OK) {
        int32_t isSync;
        if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
            isSync = false;
        }

        printf("got an output frame of size %d%s\n", buffer->range_length(),
               isSync ? " (SYNC)" : "");

        buffer->release();
        buffer = NULL;
    }

    encoder->stop();
#endif

    client.disconnect();
#endif

#if 0
    CameraSource *source = CameraSource::Create();
    printf("source = %p\n", source);

    for (int i = 0; i < 100; ++i) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);
        CHECK_EQ(err, OK);

        printf("got a frame, data=%p, size=%d\n",
               buffer->data(), buffer->range_length());

        buffer->release();
        buffer = NULL;
    }

    delete source;
    source = NULL;
#endif

    return 0;
}
Example #19
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

    const char *rtpFilename = NULL;
    const char *rtcpFilename = NULL;

    if (argc == 3) {
        rtpFilename = argv[1];
        rtcpFilename = argv[2];
    } else if (argc != 1) {
        fprintf(stderr, "usage: %s [ rtpFilename rtcpFilename ]\n", argv[0]);
        return 1;
    }

#if 0
    static const uint8_t kSPS[] = {
        0x67, 0x42, 0x80, 0x0a, 0xe9, 0x02, 0x83, 0xe4, 0x20, 0x00, 0x00, 0x7d, 0x00, 0x00, 0x0e, 0xa6, 0x00, 0x80
    };
    static const uint8_t kPPS[] = {
        0x68, 0xce, 0x3c, 0x80
    };
    AString out1, out2;
    encodeBase64(kSPS, sizeof(kSPS), &out1);
    encodeBase64(kPPS, sizeof(kPPS), &out2);
    printf("params=%s,%s\n", out1.c_str(), out2.c_str());
#endif

    sp<ALooper> looper = new ALooper;

    sp<UDPPusher> rtp_pusher;
    sp<UDPPusher> rtcp_pusher;

    if (rtpFilename != NULL) {
        rtp_pusher = new UDPPusher(rtpFilename, 5434);
        looper->registerHandler(rtp_pusher);

        rtcp_pusher = new UDPPusher(rtcpFilename, 5435);
        looper->registerHandler(rtcp_pusher);
    }

    sp<ARTPSession> session = new ARTPSession;
    looper->registerHandler(session);

#if 0
    // My H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H264/90000\r\n"
        "a=fmtp:97 packetization-mode=1;profile-level-id=42000C;"
          "sprop-parameter-sets=Z0IADJZUCg+I,aM44gA==\r\n"
        "a=mpeg4-esid:201\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My H263 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H263-1998/90000\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My AMR SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=audio 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 AMR/8000/1\r\n"
        "a=fmtp:97 octet-align\r\n";
#elif 1
    // GTalk's H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
          "sprop-parameter-sets=Z0IAHpZUBaHogA==,aM44gA==\r\n"
        "a=cliprect:0,0,480,270\r\n"
        "a=framesize:96 720-480\r\n";
#else
    // sholes H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
          "sprop-parameter-sets=Z0KACukCg+QgAAB9AAAOpgCA,aM48gA==\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:96 320-240\r\n";
#endif

    sp<ASessionDescription> desc = new ASessionDescription;
    CHECK(desc->setTo(raw, strlen(raw)));

    CHECK_EQ(session->setup(desc), (status_t)OK);

    if (rtp_pusher != NULL) {
        rtp_pusher->start();
    }

    if (rtcp_pusher != NULL) {
        rtcp_pusher->start();
    }

    looper->start(false /* runOnCallingThread */);

    CHECK_EQ(session->countTracks(), 1u);
    sp<MediaSource> source = session->trackAt(0);

    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(),
            source->getFormat(), false /* createEncoder */,
            source,
            NULL,
            0);  // or pass OMXCodec::kPreferSoftwareCodecs here
    CHECK(decoder != NULL);

    CHECK_EQ(decoder->start(), (status_t)OK);

    for (;;) {
        MediaBuffer *buffer;
        status_t err = decoder->read(&buffer);

        if (err != OK) {
            if (err == INFO_FORMAT_CHANGED) {
                int32_t width, height;
                CHECK(decoder->getFormat()->findInt32(kKeyWidth, &width));
                CHECK(decoder->getFormat()->findInt32(kKeyHeight, &height));
                printf("INFO_FORMAT_CHANGED %d x %d\n", width, height);
                continue;
            }

            LOGE("decoder returned error 0x%08x", err);
            break;
        }

#if 1
        if (buffer->range_length() != 0) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

            printf("decoder returned frame of size %d at time %.2f secs\n",
                   buffer->range_length(), timeUs / 1E6);
        }
#endif

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), (status_t)OK);

    looper->stop();

    return 0;
}
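
The session description in the listing above is baked in at compile time through the #if/#elif chain. As a purely hypothetical convenience (not part of the original test), the same ASessionDescription::setTo() call could be fed from an .sdp file chosen at runtime; a minimal sketch, assuming standard C file I/O and the headers already used above:

// Hypothetical helper (not in the original source): load an SDP description
// from a file instead of recompiling with a different hard-coded string.
static sp<ASessionDescription> loadSessionDescription(const char *path) {
    FILE *f = fopen(path, "rb");
    if (f == NULL) {
        return NULL;
    }

    fseek(f, 0, SEEK_END);
    long size = ftell(f);
    rewind(f);

    if (size <= 0) {
        fclose(f);
        return NULL;
    }

    char *buf = new char[size];
    size_t n = fread(buf, 1, size, f);
    fclose(f);

    sp<ASessionDescription> desc = new ASessionDescription;
    bool ok = desc->setTo(buf, n);
    delete[] buf;

    if (!ok) {
        return NULL;
    }
    return desc;
}

With such a helper, the CHECK(desc->setTo(raw, strlen(raw))) above would become a NULL check on the loaded description instead.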
Example #20
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpProfiles = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    bool useSurfaceAlloc = false;
    bool useSurfaceTexAlloc = false;
    bool dumpStream = false;
    bool dumpPCMStream = false;
    String8 dumpStreamFilename;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gForceToUseHardwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;
    gDisplayHistogram = false;

    sp<ALooper> looper;
    sp<LiveSession> liveSession;

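    // Option summary (see the switch statement below):
    //   -a          decode the audio track instead of the video track
    //   -d <file>   dump the selected (still encoded) track to <file>
    //   -D <file>   decode the audio track and dump the PCM output to <file>
    //   -l          list the available OMX components and their roles
    //   -n <count>  repeat the decode <count> times
    //   -m <count>  decode at most <count> frames per repetition
    //   -b <id>     reproduce a known bug scenario
    //   -w <file>   remux the first audio and video track into an MP4 <file>
    //   -p          dump the supported codec profiles and levels
    //   -t          extract a thumbnail (or album art) from each input file
    //   -s          prefer software codecs    -r  force hardware codecs
    //   -o          play back the decoded audio
    //   -k          run the seek test         -x  display a histogram
    //   -S          render into a SurfaceFlinger surface
    //   -T          render into a SurfaceTexture-backed surface
    //   -h          print usage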
    int res;
    while ((res = getopt(argc, argv, "han:lm:b:ptsrow:kxSTd:D:")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'd':
            {
                dumpStream = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'D':
            {
                dumpPCMStream = true;
                audioOnly = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'p':
            {
                dumpProfiles = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'r':
            {
                gForceToUseHardwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case 'x':
            {
                gDisplayHistogram = true;
                break;
            }

            case 'S':
            {
                useSurfaceAlloc = true;
                break;
            }

            case 'T':
            {
                useSurfaceTexAlloc = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

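    // Thumbnail extraction goes through the media.player service's
    // IMediaMetadataRetriever rather than a local extractor; the decoded
    // frame, if any, is written out as /sdcard/out.jpg.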
    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever();

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            bool failed = true;

            int fd = open(filename, O_RDONLY | O_LARGEFILE);
            CHECK_GE(fd, 0);

            off64_t fileSize = lseek64(fd, 0, SEEK_END);
            CHECK_GE(fileSize, 0ll);

            CHECK_EQ(retriever->setDataSource(fd, 0, fileSize), (status_t)OK);

            close(fd);
            fd = -1;

            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

            if (mem != NULL) {
                failed = false;
                printf("getFrameAtTime(%s) => OK\n", filename);

                VideoFrame *frame = (VideoFrame *)mem->pointer();

                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
                            (uint8_t *)frame + sizeof(VideoFrame),
                            frame->mWidth, frame->mHeight), 0);
            }

            {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    failed = false;
                    printf("extractAlbumArt(%s) => OK\n", filename);
                }
            }

            if (failed) {
                printf("both getFrameAtTime and extractAlbumArt "
                    "failed on file '%s'.\n", filename);
            }
        }

        return 0;
    }

    if (dumpProfiles) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);
        dumpCodecProfiles(omx, true /* queryDecoders */);
        dumpCodecProfiles(omx, false /* queryDecoders */);
    }

    if (listComponents) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        List<IOMX::ComponentInfo> list;
        omx->listNodes(&list);

        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
             it != list.end(); ++it) {
            printf("%s\t Roles: ", (*it).mName.string());
            for (List<String8>::iterator itRoles = (*it).mRoles.begin() ;
                    itRoles != (*it).mRoles.end() ; ++itRoles) {
                printf("%s\t", (*itRoles).string());
            }
            printf("\n");
        }
    }

    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;

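    // -S renders into a real SurfaceFlinger surface, -T into an offscreen
    // SurfaceTexture-backed one; either way the resulting Surface is connected
    // as a media native window (gSurface) for the video decoder to render into.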
    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        if (useSurfaceAlloc) {
            composerClient = new SurfaceComposerClient;
            CHECK_EQ(composerClient->initCheck(), (status_t)OK);

            control = composerClient->createSurface(
                    String8("A Surface"),
                    1280,
                    800,
                    PIXEL_FORMAT_RGB_565,
                    0);

            CHECK(control != NULL);
            CHECK(control->isValid());

            SurfaceComposerClient::openGlobalTransaction();
            CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
            CHECK_EQ(control->show(), (status_t)OK);
            SurfaceComposerClient::closeGlobalTransaction();

            gSurface = control->getSurface();
            CHECK(gSurface != NULL);
        } else {
            CHECK(useSurfaceTexAlloc);

            sp<GLConsumer> texture = new GLConsumer(0 /* tex */);
            gSurface = new Surface(texture->getBufferQueue());
        }

        CHECK_EQ((status_t)OK,
                 native_window_api_connect(
                     gSurface.get(), NATIVE_WINDOW_API_MEDIA));
    }

    DataSource::RegisterDefaultSniffers();

    OMXClient client;
    status_t err = client.connect();
    CHECK_EQ(err, (status_t)OK);

    for (int k = 0; k < argc; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);

        if (strncasecmp(filename, "sine:", 5)
                && strncasecmp(filename, "httplive://", 11)
                && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<MediaExtractor> extractor;

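            // httplive:// URIs are rewritten to http:// and handed to a
            // LiveSession running on its own ALooper; the resulting data
            // source is always treated as an MPEG-2 transport stream and
            // carries no sync-sample information.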
            if (!strncasecmp("httplive://", filename, 11)) {
                String8 uri("http://");
                uri.append(filename + 11);

                if (looper == NULL) {
                    looper = new ALooper;
                    looper->start();
                }
                liveSession = new LiveSession(NULL /* notify */);
                looper->registerHandler(liveSession);

                liveSession->connect(uri.string());
                dataSource = liveSession->getDataSource();

                extractor =
                    MediaExtractor::Create(
                            dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

                syncInfoPresent = false;
            } else {
                extractor = MediaExtractor::Create(dataSource);

                if (extractor == NULL) {
                    fprintf(stderr, "could not create extractor.\n");
                    return -1;
                }

                sp<MetaData> meta = extractor->getMetaData();

                if (meta != NULL) {
                    const char *mime;
                    CHECK(meta->findCString(kKeyMIMEType, &mime));

                    if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
                        syncInfoPresent = false;
                    }
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = extractor->getTrack(i);

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %lld us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = extractor->getTrack(i);
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (dumpStream) {
            dumpSource(mediaSource, dumpStreamFilename);
        } else if (dumpPCMStream) {
            OMXClient client;
            CHECK_EQ(client.connect(), (status_t)OK);

            sp<MediaSource> decSource =
                OMXCodec::Create(
                        client.interface(),
                        mediaSource->getFormat(),
                        false,
                        mediaSource,
                        0,
                        0);

            dumpSource(decSource, dumpStreamFilename);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(&client, mediaSource);
        }
    }

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        CHECK_EQ((status_t)OK,
                 native_window_api_disconnect(
                     gSurface.get(), NATIVE_WINDOW_API_MEDIA));

        gSurface.clear();

        if (useSurfaceAlloc) {
            composerClient->dispose();
        }
    }

    client.disconnect();

    return 0;
}
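
dumpSource(), performSeekTest(), playSource() and writeSourcesToMP4() used above are helpers defined elsewhere in stagefright.cpp and are not reproduced here. As a rough, non-authoritative sketch of what a dumpSource-style helper amounts to (assuming it simply appends every buffer's payload to the output file), it is the usual MediaSource read loop:

// Sketch only: drain a MediaSource and append the raw payload of every buffer
// to 'filename'. The real dumpSource() in stagefright.cpp may differ in detail.
static void dumpSourceSketch(const sp<MediaSource> &source, const String8 &filename) {
    FILE *out = fopen(filename.string(), "wb");
    CHECK(out != NULL);

    CHECK_EQ(source->start(), (status_t)OK);

    for (;;) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);

        if (err != OK) {
            break;  // ERROR_END_OF_STREAM or a real error
        }

        if (buffer->range_length() > 0) {
            fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
                   1, buffer->range_length(), out);
        }

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(source->stop(), (status_t)OK);
    fclose(out);
}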
Example #21
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpProfiles = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;

    sp<ALooper> looper;
    sp<ARTSPController> rtspController;

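    // Option summary for this older variant (see the switch statement below):
    //   -a audio track, -l list components, -n repetitions, -m max frames,
    //   -b reproduce bug, -w write MP4, -p dump profiles, -t extract thumbnail,
    //   -s prefer software codecs, -o play decoded audio, -k seek test, -h usage.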
    int res;
    while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'p':
            {
                dumpProfiles = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever(getpid());

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            CHECK_EQ(retriever->setDataSource(filename), (status_t)OK);
            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

            if (mem != NULL) {
                printf("getFrameAtTime(%s) => OK\n", filename);
            } else {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    printf("extractAlbumArt(%s) => OK\n", filename);
                } else {
                    printf("both getFrameAtTime and extractAlbumArt "
                           "failed on file '%s'.\n", filename);
                }
            }
        }

        return 0;
    }

    if (dumpProfiles) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        const char *kMimeTypes[] = {
            MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
            MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AAC,
            MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB,
            MEDIA_MIMETYPE_AUDIO_MPEG
        };

        for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
             ++k) {
            printf("type '%s':\n", kMimeTypes[k]);

            Vector<CodecCapabilities> results;
            CHECK_EQ(QueryCodecs(omx, kMimeTypes[k],
                                 true, // queryDecoders
                                 &results), (status_t)OK);

            for (size_t i = 0; i < results.size(); ++i) {
                printf("  decoder '%s' supports ",
                       results[i].mComponentName.string());

                if (results[i].mProfileLevels.size() == 0) {
                    printf("NOTHING.\n");
                    continue;
                }

                for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) {
                    const CodecProfileLevel &profileLevel =
                        results[i].mProfileLevels[j];

                    printf("%s%ld/%ld", j > 0 ? ", " : "",
                           profileLevel.mProfile, profileLevel.mLevel);
                }

                printf("\n");
            }
        }
    }

    if (listComponents) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        List<IOMX::ComponentInfo> list;
        omx->listNodes(&list);

        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
             it != list.end(); ++it) {
            printf("%s\n", (*it).mName.string());
        }
    }

    DataSource::RegisterDefaultSniffers();

    OMXClient client;
    status_t err = client.connect();
    CHECK_EQ(err, (status_t)OK);

    for (int k = 0; k < argc; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);

        if (strncasecmp(filename, "sine:", 5)
                && strncasecmp(filename, "rtsp://", 7)
                && strncasecmp(filename, "httplive://", 11)
                && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<MediaExtractor> extractor;

            if (!strncasecmp("rtsp://", filename, 7)) {
                if (looper == NULL) {
                    looper = new ALooper;
                    looper->start();
                }

                rtspController = new ARTSPController(looper);
                status_t err = rtspController->connect(filename);
                if (err != OK) {
                    fprintf(stderr, "could not connect to rtsp server.\n");
                    return -1;
                }

                extractor = rtspController.get();

                syncInfoPresent = false;
            } else if (!strncasecmp("httplive://", filename, 11)) {
                String8 uri("http://");
                uri.append(filename + 11);

                dataSource = new LiveSource(uri.string());
                dataSource = new NuCachedSource2(dataSource);

                extractor =
                    MediaExtractor::Create(
                            dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

                syncInfoPresent = false;
            } else {
                extractor = MediaExtractor::Create(dataSource);
                if (extractor == NULL) {
                    fprintf(stderr, "could not create extractor.\n");
                    return -1;
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = extractor->getTrack(i);

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %lld us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = extractor->getTrack(i);
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(&client, mediaSource);
        }

        if (rtspController != NULL) {
            rtspController->disconnect();
            rtspController.clear();

            sleep(3);
        }
    }

    client.disconnect();

    return 0;
}
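
performSeekTest() is likewise defined elsewhere in stagefright.cpp. A minimal sketch of a seek test, assuming it only needs to jump to a few random positions within the track duration and decode one buffer at each of them (the real helper may check the results more carefully):

// Sketch only: seek to a handful of random positions and read one buffer at
// each of them, printing the requested and returned timestamps.
static void seekTestSketch(const sp<MediaSource> &source) {
    CHECK_EQ(source->start(), (status_t)OK);

    int64_t durationUs = 0;
    source->getFormat()->findInt64(kKeyDuration, &durationUs);

    for (int i = 0; i < 10; ++i) {
        int64_t seekTimeUs = durationUs > 0 ? (rand() % durationUs) : 0;

        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBuffer *buffer;
        status_t err = source->read(&buffer, &options);

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
            printf("seek to %lld us => buffer at %lld us\n", seekTimeUs, timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("seek to %lld us => error 0x%08x\n", seekTimeUs, err);
        }
    }

    CHECK_EQ(source->stop(), (status_t)OK);
}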