int main(int argc, char **argv) {
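    // Audio encoder smoke test: feed a generated sine tone into an OMX audio
    // encoder and pull a fixed number of encoded buffers.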
    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    const int32_t kSampleRate = 22050;
    const int32_t kNumChannels = 2;
    sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);

#if 0
    sp<MediaPlayerBase::AudioSink> audioSink;
    AudioPlayer *player = new AudioPlayer(audioSink);
    player->setSource(audioSource);
    player->start();

    sleep(10);

    player->stop();
#endif

    sp<MetaData> encMeta = new MetaData;
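    // The constant condition below selects AMR-WB; change it to 0 to request
    // an AAC encoder instead.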
    encMeta->setCString(kKeyMIMEType,
            1 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
    encMeta->setInt32(kKeySampleRate, kSampleRate);
    encMeta->setInt32(kKeyChannelCount, kNumChannels);
    encMeta->setInt32(kKeyMaxInputSize, 8192);

    sp<MediaSource> encoder =
        OMXCodec::Create(client.interface(), encMeta, true, audioSource);

    encoder->start();

    int32_t n = 0;
    status_t err;
    MediaBuffer *buffer;
    while ((err = encoder->read(&buffer)) == OK) {
        printf(".");
        fflush(stdout);

        buffer->release();
        buffer = NULL;

        if (++n == 100) {
            break;
        }
    }
    printf("$\n");

    encoder->stop();

    client.disconnect();

    return 0;
}
int main(int argc, char **argv) {
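    // Transcoding test: decode a video track from the given file (or use a
    // synthetic DummySource), re-encode it to MPEG-4 and, depending on the
    // #if blocks, either mux it into /sdcard/output.mp4 or just drain the
    // encoder output.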
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

#if 1
    if (argc != 2) {
        fprintf(stderr, "usage: %s filename\n", argv[0]);
        return 1;
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

#if 1
    sp<MediaSource> source = createSource(argv[1]);

    if (source == NULL) {
        fprintf(stderr, "Unable to find a suitable video track.\n");
        return 1;
    }

    sp<MetaData> meta = source->getFormat();

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */, source);

    int width, height;
    bool success = meta->findInt32(kKeyWidth, &width);
    success = success && meta->findInt32(kKeyHeight, &height);
    CHECK(success);
#else
    int width = 800;
    int height = 480;
    sp<MediaSource> decoder = new DummySource(width, height);
#endif

    sp<MetaData> enc_meta = new MetaData;
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);

    sp<MediaSource> encoder =
        OMXCodec::Create(
                client.interface(), enc_meta, true /* createEncoder */, decoder);

#if 1
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
    writer->addSource(encoder);
    writer->start();
    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
#else
    encoder->start();

    MediaBuffer *buffer;
    while (encoder->read(&buffer) == OK) {
        int32_t isSync;
        if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
            isSync = false;
        }

        printf("got an output frame of size %d%s\n", buffer->range_length(),
               isSync ? " (SYNC)" : "");

        buffer->release();
        buffer = NULL;
    }

    encoder->stop();
#endif

    client.disconnect();
#endif

#if 0
    CameraSource *source = CameraSource::Create();
    printf("source = %p\n", source);

    for (int i = 0; i < 100; ++i) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);
        CHECK_EQ(err, OK);

        printf("got a frame, data=%p, size=%d\n",
               buffer->data(), buffer->range_length());

        buffer->release();
        buffer = NULL;
    }

    delete source;
    source = NULL;
#endif

    return 0;
}
M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;
    OMXClient client;

    ALOGV("VideoEditorVideoEncoder_getDSI begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Create the encoder source
    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = client.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
    // VIDEOEDITOR_FORCECODEC MUST be defined here
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
    encoder = OMXCodec::Create(client.interface(), metaData, true,
        encoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);

    /**
     * Send fake frames and retrieve the DSI
     */
    // Send a fake frame to the source
    metaData->findInt32(kKeyStride,     &stride);
    metaData->findInt32(kKeyHeight,     &height);
    metaData->findInt32(kKeySampleRate, &framerate);
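    // One raw YUV 4:2:0 frame occupies stride * height * 3/2 bytes.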
    size = (size_t)(stride*height*3)/2;
    inputBuffer = new MediaBuffer(size);
    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
    nbBuffer = encoderSource->storeBuffer(inputBuffer);
    encoderSource->storeBuffer(NULL); // Signal EOS

    // Call read once to get the DSI
    result = encoder->start();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    result = encoder->read(&outputBuffer, NULL);
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);

    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
        // For H264, format the DSI
        result = buildAVCCodecSpecificData(
            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
            (size_t*)(&(pEncoderContext->mHeader.Size)),
            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
            outputBuffer->range_length(), encoder->getFormat().get());
        outputBuffer->release();
        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    } else {
        // For MPEG4, just copy the DSI
        pEncoderContext->mHeader.Size =
            (M4OSA_UInt32)outputBuffer->range_length();
        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
            pEncoderContext->mHeader.Size, "Encoder header");
        memcpy((void *)pEncoderContext->mHeader.pBuf,
            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
            pEncoderContext->mHeader.Size);
        outputBuffer->release();
    }

    result = encoder->stop();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

cleanUp:
    // Destroy the graph
    if ( encoder != NULL ) { encoder.clear(); }
    client.disconnect();
    if ( encoderSource != NULL ) { encoderSource.clear(); }
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_getDSI no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_getDSI end");
    return err;
}
int main(int argc, char **argv) {
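    // Video encoder benchmark: encode synthetic frames from a DummySource
    // (AVC by default, MPEG-4 or H.263 via -v), write them to an MP4 file and
    // report the achieved encoding speed.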

    // Default values for the program if not overwritten
    int frameRateFps = 30;
    int width = 176;
    int height = 144;
    int bitRateBps = 300000;
    int iFramesIntervalSeconds = 1;
    int colorFormat = OMX_COLOR_FormatYUV420Planar;
    int nFrames = 300;
    int level = -1;        // Encoder specific default
    int profile = -1;      // Encoder specific default
    int codec = 0;
    const char *fileName = "/sdcard/output.mp4";

    android::ProcessState::self()->startThreadPool();
    int res;
    while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
        switch (res) {
            case 'b':
            {
                bitRateBps = atoi(optarg);
                break;
            }

            case 'c':
            {
                colorFormat = translateColorToOmxEnumValue(atoi(optarg));
                if (colorFormat == -1) {
                    usage(argv[0]);
                }
                break;
            }

            case 'f':
            {
                frameRateFps = atoi(optarg);
                break;
            }

            case 'i':
            {
                iFramesIntervalSeconds = atoi(optarg);
                break;
            }

            case 'n':
            {
                nFrames = atoi(optarg);
                break;
            }

            case 'w':
            {
                width = atoi(optarg);
                break;
            }

            case 't':
            {
                height = atoi(optarg);
                break;
            }

            case 'l':
            {
                level = atoi(optarg);
                break;
            }

            case 'p':
            {
                profile = atoi(optarg);
                break;
            }

            case 'v':
            {
                codec = atoi(optarg);
                if (codec < 0 || codec > 2) {
                    usage(argv[0]);
                }
                break;
            }

            case 'h':
            default:
            {
                usage(argv[0]);
                break;
            }
        }
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

    status_t err = OK;
    sp<MediaSource> source =
        new DummySource(width, height, nFrames, frameRateFps, colorFormat);

    sp<MetaData> enc_meta = new MetaData;
    switch (codec) {
        case 1:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
            break;
        case 2:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
            break;
        default:
            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
            break;
    }
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    enc_meta->setInt32(kKeyFrameRate, frameRateFps);
    enc_meta->setInt32(kKeyBitRate, bitRateBps);
    enc_meta->setInt32(kKeyStride, width);
    enc_meta->setInt32(kKeySliceHeight, height);
    enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
    enc_meta->setInt32(kKeyColorFormat, colorFormat);
    if (level != -1) {
        enc_meta->setInt32(kKeyVideoLevel, level);
    }
    if (profile != -1) {
        enc_meta->setInt32(kKeyVideoProfile, profile);
    }

    sp<MediaSource> encoder =
        OMXCodec::Create(
                client.interface(), enc_meta, true /* createEncoder */, source);

    sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
    writer->addSource(encoder);
    int64_t start = systemTime();
    CHECK_EQ(OK, writer->start());
    while (!writer->reachedEOS()) {
        usleep(100000);  // poll for completion without busy-spinning
    }
    err = writer->stop();
    int64_t end = systemTime();

    fprintf(stderr, "$\n");
    client.disconnect();

    if (err != OK && err != ERROR_END_OF_STREAM) {
        fprintf(stderr, "record failed: %d\n", err);
        return 1;
    }
    fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
    fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
    return 0;
}
int main(int argc, char **argv) {
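    // General-purpose stagefright test tool: depending on the options it
    // lists OMX components, dumps codec profiles, extracts thumbnails or
    // album art, dumps raw or decoded streams, remuxes tracks to MP4, runs
    // seek tests, or simply decodes the given files.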
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpProfiles = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    bool useSurfaceAlloc = false;
    bool useSurfaceTexAlloc = false;
    bool dumpStream = false;
    bool dumpPCMStream = false;
    String8 dumpStreamFilename;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gForceToUseHardwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;
    gDisplayHistogram = false;

    sp<ALooper> looper;
    sp<LiveSession> liveSession;

    int res;
    while ((res = getopt(argc, argv, "han:lm:b:ptsrow:kxSTd:D:")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'd':
            {
                dumpStream = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'D':
            {
                dumpPCMStream = true;
                audioOnly = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'p':
            {
                dumpProfiles = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'r':
            {
                gForceToUseHardwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case 'x':
            {
                gDisplayHistogram = true;
                break;
            }

            case 'S':
            {
                useSurfaceAlloc = true;
                break;
            }

            case 'T':
            {
                useSurfaceTexAlloc = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever();

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            bool failed = true;

            int fd = open(filename, O_RDONLY | O_LARGEFILE);
            CHECK_GE(fd, 0);

            off64_t fileSize = lseek64(fd, 0, SEEK_END);
            CHECK_GE(fileSize, 0ll);

            CHECK_EQ(retriever->setDataSource(fd, 0, fileSize), (status_t)OK);

            close(fd);
            fd = -1;

            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

            if (mem != NULL) {
                failed = false;
                printf("getFrameAtTime(%s) => OK\n", filename);

                VideoFrame *frame = (VideoFrame *)mem->pointer();

                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
                            (uint8_t *)frame + sizeof(VideoFrame),
                            frame->mWidth, frame->mHeight), 0);
            }

            {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    failed = false;
                    printf("extractAlbumArt(%s) => OK\n", filename);
                }
            }

            if (failed) {
                printf("both getFrameAtTime and extractAlbumArt "
                    "failed on file '%s'.\n", filename);
            }
        }

        return 0;
    }

    if (dumpProfiles) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);
        dumpCodecProfiles(omx, true /* queryDecoders */);
        dumpCodecProfiles(omx, false /* queryDecoders */);
    }

    if (listComponents) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        List<IOMX::ComponentInfo> list;
        omx->listNodes(&list);

        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
             it != list.end(); ++it) {
            printf("%s\t Roles: ", (*it).mName.string());
            for (List<String8>::iterator itRoles = (*it).mRoles.begin() ;
                    itRoles != (*it).mRoles.end() ; ++itRoles) {
                printf("%s\t", (*itRoles).string());
            }
            printf("\n");
        }
    }

    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        if (useSurfaceAlloc) {
            composerClient = new SurfaceComposerClient;
            CHECK_EQ(composerClient->initCheck(), (status_t)OK);

            control = composerClient->createSurface(
                    String8("A Surface"),
                    1280,
                    800,
                    PIXEL_FORMAT_RGB_565,
                    0);

            CHECK(control != NULL);
            CHECK(control->isValid());

            SurfaceComposerClient::openGlobalTransaction();
            CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
            CHECK_EQ(control->show(), (status_t)OK);
            SurfaceComposerClient::closeGlobalTransaction();

            gSurface = control->getSurface();
            CHECK(gSurface != NULL);
        } else {
            CHECK(useSurfaceTexAlloc);

            sp<GLConsumer> texture = new GLConsumer(0 /* tex */);
            gSurface = new Surface(texture->getBufferQueue());
        }

        CHECK_EQ((status_t)OK,
                 native_window_api_connect(
                     gSurface.get(), NATIVE_WINDOW_API_MEDIA));
    }

    DataSource::RegisterDefaultSniffers();

    OMXClient client;
    status_t err = client.connect();
    CHECK_EQ(err, (status_t)OK);

    for (int k = 0; k < argc; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);

        if (strncasecmp(filename, "sine:", 5)
                && strncasecmp(filename, "httplive://", 11)
                && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<MediaExtractor> extractor;

            if (!strncasecmp("httplive://", filename, 11)) {
                String8 uri("http://");
                uri.append(filename + 11);

                if (looper == NULL) {
                    looper = new ALooper;
                    looper->start();
                }
                liveSession = new LiveSession(NULL /* notify */);
                looper->registerHandler(liveSession);

                liveSession->connect(uri.string());
                dataSource = liveSession->getDataSource();

                extractor =
                    MediaExtractor::Create(
                            dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

                syncInfoPresent = false;
            } else {
                extractor = MediaExtractor::Create(dataSource);

                if (extractor == NULL) {
                    fprintf(stderr, "could not create extractor.\n");
                    return -1;
                }

                sp<MetaData> meta = extractor->getMetaData();

                if (meta != NULL) {
                    const char *mime;
                    CHECK(meta->findCString(kKeyMIMEType, &mime));

                    if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
                        syncInfoPresent = false;
                    }
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = extractor->getTrack(i);

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %lld us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = extractor->getTrack(i);
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (dumpStream) {
            dumpSource(mediaSource, dumpStreamFilename);
        } else if (dumpPCMStream) {
            OMXClient client;
            CHECK_EQ(client.connect(), (status_t)OK);

            sp<MediaSource> decSource =
                OMXCodec::Create(
                        client.interface(),
                        mediaSource->getFormat(),
                        false,
                        mediaSource,
                        0,
                        0);

            dumpSource(decSource, dumpStreamFilename);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(&client, mediaSource);
        }
    }

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        CHECK_EQ((status_t)OK,
                 native_window_api_disconnect(
                     gSurface.get(), NATIVE_WINDOW_API_MEDIA));

        gSurface.clear();

        if (useSurfaceAlloc) {
            composerClient->dispose();
        }
    }

    client.disconnect();

    return 0;
}
int main(int argc, char **argv) {
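    // Older variant of the same test tool: RTSP input goes through
    // ARTSPController and HTTP live streams through LiveSource/NuCachedSource2.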
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpProfiles = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;

    sp<ALooper> looper;
    sp<ARTSPController> rtspController;

    int res;
    while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'p':
            {
                dumpProfiles = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever(getpid());

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            CHECK_EQ(retriever->setDataSource(filename), (status_t)OK);
            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

            if (mem != NULL) {
                printf("getFrameAtTime(%s) => OK\n", filename);
            } else {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    printf("extractAlbumArt(%s) => OK\n", filename);
                } else {
                    printf("both getFrameAtTime and extractAlbumArt "
                           "failed on file '%s'.\n", filename);
                }
            }
        }

        return 0;
    }

    if (dumpProfiles) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        const char *kMimeTypes[] = {
            MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
            MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AAC,
            MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB,
            MEDIA_MIMETYPE_AUDIO_MPEG
        };

        for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
             ++k) {
            printf("type '%s':\n", kMimeTypes[k]);

            Vector<CodecCapabilities> results;
            CHECK_EQ(QueryCodecs(omx, kMimeTypes[k],
                                 true, // queryDecoders
                                 &results), (status_t)OK);

            for (size_t i = 0; i < results.size(); ++i) {
                printf("  decoder '%s' supports ",
                       results[i].mComponentName.string());

                if (results[i].mProfileLevels.size() == 0) {
                    printf("NOTHING.\n");
                    continue;
                }

                for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) {
                    const CodecProfileLevel &profileLevel =
                        results[i].mProfileLevels[j];

                    printf("%s%ld/%ld", j > 0 ? ", " : "",
                           profileLevel.mProfile, profileLevel.mLevel);
                }

                printf("\n");
            }
        }
    }

    if (listComponents) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IOMX> omx = service->getOMX();
        CHECK(omx.get() != NULL);

        List<IOMX::ComponentInfo> list;
        omx->listNodes(&list);

        for (List<IOMX::ComponentInfo>::iterator it = list.begin();
             it != list.end(); ++it) {
            printf("%s\n", (*it).mName.string());
        }
    }

    DataSource::RegisterDefaultSniffers();

    OMXClient client;
    status_t err = client.connect();
    CHECK_EQ(err, (status_t)OK);

    for (int k = 0; k < argc; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource = DataSource::CreateFromURI(filename);

        if (strncasecmp(filename, "sine:", 5)
                && strncasecmp(filename, "rtsp://", 7)
                && strncasecmp(filename, "httplive://", 11)
                && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<MediaExtractor> extractor;

            if (!strncasecmp("rtsp://", filename, 7)) {
                if (looper == NULL) {
                    looper = new ALooper;
                    looper->start();
                }

                rtspController = new ARTSPController(looper);
                status_t err = rtspController->connect(filename);
                if (err != OK) {
                    fprintf(stderr, "could not connect to rtsp server.\n");
                    return -1;
                }

                extractor = rtspController.get();

                syncInfoPresent = false;
            } else if (!strncasecmp("httplive://", filename, 11)) {
                String8 uri("http://");
                uri.append(filename + 11);

                dataSource = new LiveSource(uri.string());
                dataSource = new NuCachedSource2(dataSource);

                extractor =
                    MediaExtractor::Create(
                            dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);

                syncInfoPresent = false;
            } else {
                extractor = MediaExtractor::Create(dataSource);
                if (extractor == NULL) {
                    fprintf(stderr, "could not create extractor.\n");
                    return -1;
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = extractor->getTrack(i);

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %lld us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = extractor->getTrack(i);
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(&client, mediaSource);
        }

        if (rtspController != NULL) {
            rtspController->disconnect();
            rtspController.clear();

            sleep(3);
        }
    }

    client.disconnect();

    return 0;
}