int JTvInputHal::removeStream(int deviceId, int streamId) { KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId); if (connections.indexOfKey(streamId) < 0) { return BAD_VALUE; } Connection& connection = connections.editValueFor(streamId); if (connection.mSurface == NULL) { // Nothing to do return NO_ERROR; } if (connection.mThread != NULL) { connection.mThread->shutdown(); connection.mThread.clear(); } if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) { ALOGE("Couldn't remove stream"); return BAD_VALUE; } if (connection.mSourceHandle != NULL) { connection.mSourceHandle.clear(); } if (Surface::isValid(connection.mSurface)) { connection.mSurface.clear(); } if (connection.mSurface != NULL) { connection.mSurface->setSidebandStream(NULL); connection.mSurface.clear(); } return NO_ERROR; }
// Grants the GPU core to |pid| and, on success, fills |gpu| with the mapped
// SMI/EBI regions and register space, and remembers |callback| for revocation.
// Returns BAD_VALUE for a null callback, otherwise the result of requestLocked().
status_t GPUHardware::request(int pid, const sp<IGPUCallback>& callback, ISurfaceComposer::gpu_info_t* gpu) {
    if (callback == 0) return BAD_VALUE;

    sp<IMemory> gpuHandle;
    LOGD("pid %d requesting gpu core (owner = %d)", pid, mOwner);
    Mutex::Autolock _l(mLock);
    const status_t err = requestLocked(pid);
    if (err != NO_ERROR) {
        LOGW("couldn't grant gpu core to pid %d", pid);
        return err;
    }

    // The client entry is guaranteed to exist here, by construction
    // (requestLocked() succeeded).
    Client& client = mClients.editValueFor(pid);
    registerCallbackLocked(callback, client);

    gpu->count = 2;
    gpu->regions[0].region = client.smi.map();
    gpu->regions[1].region = client.ebi.map();
    gpu->regs = client.reg.map();
    gpu->regions[0].reserved = 0;
    gpu->regions[1].reserved = GPU_RESERVED_SIZE;

    mCallback = callback;
    return err;
}
void JTvInputHal::onStreamConfigurationsChanged(int deviceId) { { Mutex::Autolock autoLock(&mLock); KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId); for (size_t i = 0; i < connections.size(); ++i) { removeStream(deviceId, connections.keyAt(i)); } connections.clear(); } JNIEnv* env = AndroidRuntime::getJNIEnv(); env->CallVoidMethod( mThiz, gTvInputHalClassInfo.streamConfigsChanged, deviceId); }
void JTvInputHal::onCaptured(int deviceId, int streamId, uint32_t seq, bool succeeded) { sp<BufferProducerThread> thread; { Mutex::Autolock autoLock(&mLock); KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId); Connection& connection = connections.editValueFor(streamId); if (connection.mThread == NULL) { ALOGE("capture thread not existing."); return; } thread = connection.mThread; } thread->onCaptured(seq, succeeded); if (seq == 0) { JNIEnv* env = AndroidRuntime::getJNIEnv(); env->CallVoidMethod( mThiz, gTvInputHalClassInfo.firstFrameCaptured, deviceId, streamId); } }
int JTvInputHal::addOrUpdateStream(int deviceId, int streamId, const sp<Surface>& surface) { KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId); if (connections.indexOfKey(streamId) < 0) { connections.add(streamId, Connection()); } Connection& connection = connections.editValueFor(streamId); if (connection.mSurface == surface) { // Nothing to do return NO_ERROR; } // Clear the surface in the connection. if (connection.mSurface != NULL) { if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) { if (Surface::isValid(connection.mSurface)) { connection.mSurface->setSidebandStream(NULL); } } connection.mSurface.clear(); } if (connection.mSourceHandle == NULL && connection.mThread == NULL) { // Need to configure stream int numConfigs = 0; const tv_stream_config_t* configs = NULL; if (mDevice->get_stream_configurations( mDevice, deviceId, &numConfigs, &configs) != 0) { ALOGE("Couldn't get stream configs"); return UNKNOWN_ERROR; } int configIndex = -1; for (int i = 0; i < numConfigs; ++i) { if (configs[i].stream_id == streamId) { configIndex = i; break; } } if (configIndex == -1) { ALOGE("Cannot find a config with given stream ID: %d", streamId); return BAD_VALUE; } connection.mStreamType = configs[configIndex].type; tv_stream_t stream; stream.stream_id = configs[configIndex].stream_id; if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) { stream.buffer_producer.width = configs[configIndex].max_video_width; stream.buffer_producer.height = configs[configIndex].max_video_height; } if (mDevice->open_stream(mDevice, deviceId, &stream) != 0) { ALOGE("Couldn't add stream"); return UNKNOWN_ERROR; } if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) { connection.mSourceHandle = NativeHandle::create( stream.sideband_stream_source_handle, false); } else if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) { if (connection.mThread != NULL) { connection.mThread->shutdown(); } connection.mThread = 
TvInputHalFactory::get()->createBufferProducerThread(mDevice, deviceId, &stream); if (connection.mThread == NULL) { ALOGE("No memory for BufferProducerThread"); // clean up if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) { ALOGE("Couldn't remove stream"); } return NO_MEMORY; } } } connection.mSurface = surface; if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) { connection.mSurface->setSidebandStream(connection.mSourceHandle); } else if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) { if (NO_ERROR != connection.mThread->setSurface(surface)) { ALOGE("failed to setSurface"); // clean up connection.mThread.clear(); if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) { ALOGE("Couldn't remove stream"); } if (connection.mSurface != NULL) { connection.mSurface.clear(); } return UNKNOWN_ERROR; } connection.mThread->run(); } return NO_ERROR; }
static int decode( const android::sp<android::ALooper> &looper, const char *path, bool useAudio, bool useVideo, const android::sp<android::Surface> &surface) { using namespace android; static int64_t kTimeout = 500ll; sp<NuMediaExtractor> extractor = new NuMediaExtractor; if (extractor->setDataSource(path) != OK) { fprintf(stderr, "unable to instantiate extractor.\n"); return 1; } KeyedVector<size_t, CodecState> stateByTrack; bool haveAudio = false; bool haveVideo = false; for (size_t i = 0; i < extractor->countTracks(); ++i) { sp<AMessage> format; status_t err = extractor->getTrackFormat(i, &format); CHECK_EQ(err, (status_t)OK); AString mime; CHECK(format->findString("mime", &mime)); bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6); bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); if (useAudio && !haveAudio && isAudio) { haveAudio = true; } else if (useVideo && !haveVideo && isVideo) { haveVideo = true; } else { continue; } ALOGV("selecting track %d", i); err = extractor->selectTrack(i); CHECK_EQ(err, (status_t)OK); CodecState *state = &stateByTrack.editValueAt(stateByTrack.add(i, CodecState())); state->mNumBytesDecoded = 0; state->mNumBuffersDecoded = 0; state->mIsAudio = isAudio; state->mCodec = MediaCodec::CreateByType( looper, mime.c_str(), false /* encoder */); CHECK(state->mCodec != NULL); err = state->mCodec->configure( format, isVideo ? 
surface : NULL, NULL /* crypto */, 0 /* flags */); CHECK_EQ(err, (status_t)OK); state->mSignalledInputEOS = false; state->mSawOutputEOS = false; } CHECK(!stateByTrack.isEmpty()); int64_t startTimeUs = ALooper::GetNowUs(); for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); sp<MediaCodec> codec = state->mCodec; CHECK_EQ((status_t)OK, codec->start()); CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers)); CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers)); ALOGV("got %d input and %d output buffers", state->mInBuffers.size(), state->mOutBuffers.size()); } bool sawInputEOS = false; for (;;) { if (!sawInputEOS) { size_t trackIndex; status_t err = extractor->getSampleTrackIndex(&trackIndex); if (err != OK) { ALOGV("saw input eos"); sawInputEOS = true; } else { CodecState *state = &stateByTrack.editValueFor(trackIndex); size_t index; err = state->mCodec->dequeueInputBuffer(&index, kTimeout); if (err == OK) { ALOGV("filling input buffer %d", index); const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index); err = extractor->readSampleData(buffer); CHECK_EQ(err, (status_t)OK); int64_t timeUs; err = extractor->getSampleTime(&timeUs); CHECK_EQ(err, (status_t)OK); uint32_t bufferFlags = 0; err = state->mCodec->queueInputBuffer( index, 0 /* offset */, buffer->size(), timeUs, bufferFlags); CHECK_EQ(err, (status_t)OK); extractor->advance(); } else { CHECK_EQ(err, -EAGAIN); } } } else { for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); if (!state->mSignalledInputEOS) { size_t index; status_t err = state->mCodec->dequeueInputBuffer(&index, kTimeout); if (err == OK) { ALOGV("signalling input EOS on track %d", i); err = state->mCodec->queueInputBuffer( index, 0 /* offset */, 0 /* size */, 0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS); CHECK_EQ(err, (status_t)OK); state->mSignalledInputEOS = true; } else { CHECK_EQ(err, -EAGAIN); } } } } bool 
sawOutputEOSOnAllTracks = true; for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); if (!state->mSawOutputEOS) { sawOutputEOSOnAllTracks = false; break; } } if (sawOutputEOSOnAllTracks) { break; } for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); if (state->mSawOutputEOS) { continue; } size_t index; size_t offset; size_t size; int64_t presentationTimeUs; uint32_t flags; status_t err = state->mCodec->dequeueOutputBuffer( &index, &offset, &size, &presentationTimeUs, &flags, kTimeout); if (err == OK) { ALOGV("draining output buffer %d, time = %lld us", index, presentationTimeUs); ++state->mNumBuffersDecoded; state->mNumBytesDecoded += size; err = state->mCodec->releaseOutputBuffer(index); CHECK_EQ(err, (status_t)OK); if (flags & MediaCodec::BUFFER_FLAG_EOS) { ALOGV("reached EOS on output."); state->mSawOutputEOS = true; } } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { ALOGV("INFO_OUTPUT_BUFFERS_CHANGED"); CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(&state->mOutBuffers)); ALOGV("got %d output buffers", state->mOutBuffers.size()); } else if (err == INFO_FORMAT_CHANGED) { sp<AMessage> format; CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format)); ALOGV("INFO_FORMAT_CHANGED: %s", format->debugString().c_str()); } else { CHECK_EQ(err, -EAGAIN); } } } int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs; for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); CHECK_EQ((status_t)OK, state->mCodec->release()); if (state->mIsAudio) { printf("track %zu: %" PRId64 " bytes received. %.2f KB/sec\n", i, state->mNumBytesDecoded, state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs); } else { printf("track %zu: %" PRId64 " frames decoded, %.2f fps. %" PRId64 " bytes received. 
%.2f KB/sec\n", i, state->mNumBuffersDecoded, state->mNumBuffersDecoded * 1E6 / elapsedTimeUs, state->mNumBytesDecoded, state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs); } } return 0; }