Example #1
int Looper::addFd(int fd, int ident, int events, const sp<LooperCallback>& callback, void* data) {
#if DEBUG_CALLBACKS
    ALOGD("%p ~ addFd - fd=%d, ident=%d, events=0x%x, callback=%p, data=%p", this, fd, ident,
            events, callback.get(), data);
#endif

    if (!callback.get()) {
        if (! mAllowNonCallbacks) {
            ALOGE("Invalid attempt to set NULL callback but not allowed for this looper.");
            return -1;
        }

        if (ident < 0) {
            ALOGE("Invalid attempt to set NULL callback with ident < 0.");
            return -1;
        }
    } else {
        ident = POLL_CALLBACK;
    }

    { // acquire lock
        AutoMutex _l(mLock);

        Request request;
        request.fd = fd;
        request.ident = ident;
        request.events = events;
        request.seq = mNextRequestSeq++;
        request.callback = callback;
        request.data = data;
        if (mNextRequestSeq == -1) mNextRequestSeq = 0; // reserve sequence number -1

        struct epoll_event eventItem;
        request.initEventItem(&eventItem);

        ssize_t requestIndex = mRequests.indexOfKey(fd);
        if (requestIndex < 0) {
            int epollResult = epoll_ctl(mEpollFd, EPOLL_CTL_ADD, fd, & eventItem);
            if (epollResult < 0) {
                ALOGE("Error adding epoll events for fd %d: %s", fd, strerror(errno));
                return -1;
            }
            mRequests.add(fd, request);
        } else {
            int epollResult = epoll_ctl(mEpollFd, EPOLL_CTL_MOD, fd, & eventItem);
            if (epollResult < 0) {
                if (errno == ENOENT) {
                    // Tolerate ENOENT because it means that an older file descriptor was
                    // closed before its callback was unregistered and meanwhile a new
                    // file descriptor with the same number has been created and is now
                    // being registered for the first time.  This error may occur naturally
                    // when a callback has the side-effect of closing the file descriptor
                    // before returning and unregistering itself.  Callback sequence number
                    // checks further ensure that the race is benign.
                    //
                    // Unfortunately due to kernel limitations we need to rebuild the epoll
                    // set from scratch because it may contain an old file handle that we are
                    // now unable to remove since its file descriptor is no longer valid.
                    // No such problem would have occurred if we were using the poll system
                    // call instead, but that approach carries other disadvantages.
#if DEBUG_CALLBACKS
                    ALOGD("%p ~ addFd - EPOLL_CTL_MOD failed due to file descriptor "
                            "being recycled, falling back on EPOLL_CTL_ADD: %s",
                            this, strerror(errno));
#endif
                    epollResult = epoll_ctl(mEpollFd, EPOLL_CTL_ADD, fd, & eventItem);
                    if (epollResult < 0) {
                        ALOGE("Error modifying or adding epoll events for fd %d: %s",
                                fd, strerror(errno));
                        return -1;
                    }
                    scheduleEpollRebuildLocked();
                } else {
                    ALOGE("Error modifying epoll events for fd %d: %s", fd, strerror(errno));
                    return -1;
                }
            }
            mRequests.replaceValueAt(requestIndex, request);
        }
    } // release lock
    return 1;
}
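A minimal usage sketch (hypothetical pipe fd and callback class; the handleEvent shape follows the LooperCallback interface in utils/Looper.h): returning 1 from handleEvent keeps the fd registered, returning 0 removes it, and the ident passed to addFd is ignored once a callback is supplied, since addFd overwrites it with POLL_CALLBACK as shown above.

#include <unistd.h>

// Sketch only: drain a non-blocking pipe each time it becomes readable.
class PipeCallback : public LooperCallback {
public:
    int handleEvent(int fd, int events, void* data) override {
        char buf[64];
        while (read(fd, buf, sizeof(buf)) > 0) {}  // drain pending bytes
        return 1;  // 1 = stay registered, 0 = unregister this fd
    }
};

// sp<Looper> looper = Looper::prepare(0 /*opts*/);
// looper->addFd(pipeReadFd, 0 /*ident, replaced by POLL_CALLBACK*/,
//               Looper::EVENT_INPUT, new PipeCallback(), nullptr);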
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get())
    return false;

  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    MediaSource::ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t keyFrame;

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
    }

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    if (!SetVideoFormat())
      return false;
    else
      return ReadVideo(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mVideoSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mVideoSource ERROR %#x", err);
  }

  return err == OK;
}
// set the ISurface that the preview will use
status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
    LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
    Mutex::Autolock lock(mLock);
    status_t result = checkPidAndHardware();
    if (result != NO_ERROR) return result;

    result = NO_ERROR;

    // return if no change in surface.
    // asBinder() is safe on NULL (returns NULL)
    if (surface->asBinder() == mSurface->asBinder()) {
        return result;
    }

    if (mSurface != 0) {
        LOG1("clearing old preview surface %p", mSurface.get());
        if (mUseOverlay) {
            // Force the destruction of any previous overlay
            sp<Overlay> dummy;
            mHardware->setOverlay(dummy);
            mOverlayRef = 0;
        } else {
            mSurface->unregisterBuffers();
        }
    }
    mSurface = surface;
    mOverlayRef = 0;
    // If preview has already been started, set the overlay or register the
    // preview buffers now.
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
    if (mHardware->previewEnabled() || mUseOverlay) {
#else
    if (mHardware->previewEnabled()) {
#endif
        if (mUseOverlay) {
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
            if (mSurface != NULL) {
#endif
                result = setOverlay();
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
            }
#endif
        } else if (mSurface != 0) {
            result = registerPreviewBuffers();
        }
    }

    return result;
}

status_t CameraService::Client::registerPreviewBuffers() {
    int w, h;
    CameraParameters params(mHardware->getParameters());
    params.getPreviewSize(&w, &h);

    // for 720p recording, the preview can be 800x448
    if (w == preview_sizes[0].width && h == preview_sizes[0].height) {
        LOGD("registerPreviewBuffers: changing dimensions to 768x432 for 720p recording.");
        w = preview_sizes[1].width;
        h = preview_sizes[1].height;
    }

    // FIXME: don't use a hardcoded format here.
    ISurface::BufferHeap buffers(w, h, w, h,
                                 HAL_PIXEL_FORMAT_YCrCb_420_SP,
                                 mOrientation,
                                 0,
                                 mHardware->getPreviewHeap());

    status_t result = mSurface->registerBuffers(buffers);
    if (result != NO_ERROR) {
        LOGE("registerBuffers failed with status %d", result);
    }
    return result;
}

status_t CameraService::Client::setOverlay() {
    int w, h;
    CameraParameters params(mHardware->getParameters());
    params.getPreviewSize(&w, &h);

    // for 720p recording, the preview can be 800x448
    if (w == preview_sizes[0].width && h == preview_sizes[0].height) {
        LOGD("Changing overlay dimensions to 768x432 for 720p recording.");
        w = preview_sizes[1].width;
        h = preview_sizes[1].height;
    }

    if (w != mOverlayW || h != mOverlayH || mOrientationChanged) {
        // Force the destruction of any previous overlay
        sp<Overlay> dummy;
        mHardware->setOverlay(dummy);
        mOverlayRef = 0;
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
        if (mOverlay != NULL) {
            mOverlay->destroy();
        }
#endif
        mOrientationChanged = false;
    }

    status_t result = NO_ERROR;
    if (mSurface == 0) {
        result = mHardware->setOverlay(NULL);
    } else {
        if (mOverlayRef == 0) {
            // FIXME:
            // Surfaceflinger may hold onto the previous overlay reference for some
            // time after we try to destroy it. Retry a few times. In the future, we
            // should make the destroy call block, or possibly specify that we can
            // wait in the createOverlay call if the previous overlay is in the
            // process of being destroyed.
            for (int retry = 0; retry < 50; ++retry) {
                mOverlayRef = mSurface->createOverlay(w, h,
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
                                                      HAL_PIXEL_FORMAT_YCbCr_420_SP,
#else
                                                      OVERLAY_FORMAT_DEFAULT,
#endif
                                                      mOrientation);
                if (mOverlayRef != 0) break;
                LOGW("Overlay create failed - retrying");
                usleep(20000);
            }
            if (mOverlayRef == 0) {
                LOGE("Overlay creation failed!");
                return -EINVAL;
            }
#ifdef USE_OVERLAY_FORMAT_YCbCr_420_SP
            mOverlay = new Overlay(mOverlayRef);
            result = mHardware->setOverlay(mOverlay);
#else
            result = mHardware->setOverlay(new Overlay(mOverlayRef));
#endif
        }
    }
    if (result != NO_ERROR) {
        LOGE("mHardware->setOverlay() failed with status %d\n", result);
        return result;
    }

    mOverlayW = w;
    mOverlayH = h;

    return result;
}

// set the preview callback flag to affect how the received frames from
// preview are handled.
void CameraService::Client::setPreviewCallbackFlag(int callback_flag) {
    LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, getCallingPid());
    Mutex::Autolock lock(mLock);
    if (checkPidAndHardware() != NO_ERROR) return;

    mPreviewCallbackFlag = callback_flag;

    // If we don't use overlay, we always need the preview frame for display.
    // If we do use overlay, we only need the preview frame if the user
    // wants the data.
    if (mUseOverlay) {
        if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
            enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
        } else {
            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
        }
    }
}
int Merge::registerStream(const sp<OutStream>& stream)
{
    if (stream == NULL) {
        ALOGE("Merge: stream is invalid, cannot register");
        return -EINVAL;
    }

    const PcmParams &params = stream->getParams();
    const SlotMap &map = stream->getSlotMap();

    ALOGV("Merge: register stream %p src 0x%04x dst 0x%04x",
          stream.get(), map.getSrcMask(), map.getDstMask());

    if (params.sampleBits != mParams.sampleBits) {
        ALOGE("Merge: stream has incompatible sample size");
        return -EINVAL;
    }

    if (!stream->canResample() && (params.sampleRate != mParams.sampleRate)) {
        ALOGE("Merge: stream has incompatible sample rate");
        return -EINVAL;
    }

    if (!stream->canResample() && (params.frameCount != mParams.frameCount)) {
        ALOGE("Merge: stream has incompatible frame count");
        return -EINVAL;
    }

    /*
     * sanity check that defined dest channels fall within the defined number
     * of channels for the output
     */
    if (map.getDstMask() >= (1U << mParams.channels)) {
        ALOGE("Merge: stream's dest mask 0x%x requests channels not present in"
              " the output (%u channel output)",
              map.getDstMask(), mParams.channels);
        return -EINVAL;
    }

    AutoMutex lock(mLock);

    /* check if dst channels overlap with already registered dst channels */
    if (map.getDstMask() & mDstMask) {
        ALOGE("Merge: stream's dst mask overlaps already registered streams");
        return -EINVAL;
    }

    if (mStreams.find(stream) != mStreams.end()) {
        ALOGE("Merge: stream is already registered");
        return -EINVAL;
    }

    if (mStreams.size() == mParams.channels) {
        ALOGE("Merge: max number of streams registered");
        return -ENOMEM;
    }

    mStreams.insert(stream);

    mDstMask |= map.getDstMask();

    return 0;
}
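A small standalone illustration of the destination-mask sanity check above: for an N-channel output, every valid dst mask must be a subset of (1 << N) - 1, so the test mask >= (1U << channels) rejects any mask naming a channel the output does not have. The values here are assumed for illustration.

#include <cstdio>

int main() {
    const unsigned channels = 2;  // a 2-channel output
    for (unsigned mask = 0x1; mask <= 0x4; ++mask)
        printf("dst mask 0x%x: %s\n", mask,
               mask >= (1U << channels) ? "rejected" : "ok");  // 0x4 names channel 2
    return 0;
}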
static sp<IOMX> GetOMX() {
  if (sOMX.get() == NULL) {
    // OMX derives from IOMX, so plain sp assignment upcasts safely; the
    // previous reinterpret_cast could yield a misadjusted pointer.
    sOMX = new OMX;
  }
  return sOMX;
}
Example #6
JNIEXPORT void JNICALL Java_com_mediatek_ut_SurfaceFlingerTest_disconnect(JNIEnv* /*_env*/, jobject /*_this*/, jint id) {
    LOGD("disconnect");
    utProc->disconnect(id);
    LOGI("disconnect, id=%d, utProc=%p", id, utProc.get());
}
void DeathRecipientList::add(const sp<JavaDeathRecipient>& recipient) {
    AutoMutex _l(mLock);

    LOGDEATH("DRL @ %p : add JDR %p", this, recipient.get());
    mList.push_back(recipient);
}
void RenderProxy::initialize(const sp<Surface>& surface) {
    SETUP_TASK(initialize);
    args->context = mContext;
    args->surface = surface.get();
    post(task);
}
void RenderProxy::updateSurface(const sp<Surface>& surface) {
    SETUP_TASK(updateSurface);
    args->context = mContext;
    args->surface = surface.get();
    postAndWait(task);
}
Example #10
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  if (!mVideoSource.get())
    return false;

  for (;;) {
    ReleaseVideoBuffer();

    status_t err;

    if (aSeekTimeUs != -1) {
      MediaSource::ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mVideoSource->read(&mVideoBuffer, &options);
    } else {
      err = mVideoSource->read(&mVideoBuffer);
    }

    aSeekTimeUs = -1;

    if (err == OK) {
      if (mVideoBuffer->range_length() == 0) // If we get a spurious empty buffer, keep going
        continue;

      int64_t timeUs;
      int32_t unreadable;
      int32_t keyFrame;

      if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
        LOG("no key time");
        return false;
      }

      if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
        keyFrame = 0;
      }

      if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
        unreadable = 0;
      }

      LOG("data: %p size: %u offset: %u length: %u unreadable: %d",
          mVideoBuffer->data(), 
          mVideoBuffer->size(),
          mVideoBuffer->range_offset(),
          mVideoBuffer->range_length(),
          unreadable);

      char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
      size_t length = mVideoBuffer->range_length();

      if (unreadable) {
        LOG("video frame is unreadable");
      }

      if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
        return false;
      }

      return true;
    }

    if (err == INFO_FORMAT_CHANGED) {
      // If the format changed, update our cached info.
      if (!SetVideoFormat()) {
        return false;
      }

      // Ok, try to read a buffer again.
      continue;
    }

    /* err == ERROR_END_OF_STREAM, or some other unrecoverable error */
    break;
  }

  return false;
}
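A brief usage sketch (decoder, frame, and seekTimeUs are hypothetical names here): the seek target is consumed by the first read, so subsequent calls pass -1, and a false return signals end of stream or an error.

// Sketch only.
VideoFrame frame;
bool ok = decoder->ReadVideo(&frame, seekTimeUs);  // first read performs the seek
while (ok) {
  // ... hand the decoded frame off for display ...
  ok = decoder->ReadVideo(&frame, -1);             // then read sequentially
}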
status_t Camera3StreamSplitter::addOutputLocked(const sp<Surface>& outputQueue) {
    ATRACE_CALL();
    if (outputQueue == nullptr) {
        SP_LOGE("addOutput: outputQueue must not be NULL");
        return BAD_VALUE;
    }

    sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
    // Connect to the buffer producer
    sp<OutputListener> listener(new OutputListener(this, gbp));
    IInterface::asBinder(gbp)->linkToDeath(listener);
    status_t res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
    if (res != NO_ERROR) {
        SP_LOGE("addOutput: failed to connect (%d)", res);
        return res;
    }

    // Query consumer side buffer count, and update overall buffer count
    int maxConsumerBuffers = 0;
    res = static_cast<ANativeWindow*>(outputQueue.get())->query(
            outputQueue.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        SP_LOGE("%s: Unable to query consumer undequeued buffer count"
              " for surface", __FUNCTION__);
        return res;
    }

    SP_LOGV("%s: Consumer wants %d buffers, Producer wants %zu", __FUNCTION__,
            maxConsumerBuffers, mMaxHalBuffers);
    size_t totalBufferCount = maxConsumerBuffers + mMaxHalBuffers;
    res = native_window_set_buffer_count(outputQueue.get(),
            totalBufferCount);
    if (res != OK) {
        SP_LOGE("%s: Unable to set buffer count for surface %p",
                __FUNCTION__, outputQueue.get());
        return res;
    }

    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need skip these cases as timeout will disable the non-blocking (async) mode.
    uint64_t usage = 0;
    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(outputQueue.get()), &usage);
    if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
        outputQueue->setDequeueTimeout(kDequeueBufferTimeout);
    }

    res = gbp->allowAllocation(false);
    if (res != OK) {
        SP_LOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
        return res;
    }

    // Add new entry into mOutputs
    mOutputs.push_back(gbp);
    mNotifiers[gbp] = listener;
    mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);

    mMaxConsumerBuffers += maxConsumerBuffers;
    return NO_ERROR;
}
Example #12
bool OmxDecoder::Init() {
  //register sniffers, if they are not registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource = new MediaStreamSource(mPluginHost, mDecoder);
  if (dataSource->initCheck()) {
    return false;
  }

  mPluginHost->SetMetaDataReadMode(mDecoder);

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == NULL) {
    return false;
  }

  sp<MediaSource> videoTrack;
  sp<MediaSource> audioTrack;
  const char *audioMime = NULL;
  bool audioMetaFound = false;

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    int32_t bitRate;
    if (!meta->findInt32(kKeyBitRate, &bitRate))
      bitRate = 0;

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrack == NULL && !strncasecmp(mime, "video/", 6)) {
      videoTrack = extractor->getTrack(i);
    } else if (audioTrack == NULL && !strncasecmp(mime, "audio/", 6)) {
      audioTrack = extractor->getTrack(i);
      audioMime = mime;
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        return false;
      }
      audioMetaFound = true;
      LOG("channelCount: %d sampleRate: %d",
           mAudioChannels, mAudioSampleRate);
    }
  }

  if (videoTrack == NULL && audioTrack == NULL) {
    return false;
  }

  mPluginHost->SetPlaybackReadMode(mDecoder);

  int64_t totalDurationUs = 0;

  sp<MediaSource> videoSource;
  if (videoTrack != NULL) {
    videoSource = OMXCodec::Create(GetOMX(),
                                   videoTrack->getFormat(),
                                   false, // decoder
                                   videoTrack,
                                   NULL,
                                   0); // flags (prefer hw codecs)
    if (videoSource == NULL) {
      return false;
    }

    if (videoSource->start() != OK) {
      return false;
    }

    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  sp<MediaSource> audioSource;
  if (audioTrack != NULL) {
    if (!strcasecmp(audioMime, "audio/raw")) {
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(GetOMX(),
                                     audioTrack->getFormat(),
                                     false, // decoder
                                     audioTrack);
    }
    if (audioSource == NULL) {
      return false;
    }
    if (audioSource->start() != OK) {
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat())
    return false;

  if (!audioMetaFound && mAudioSource.get() && !SetAudioFormat())
    return false;

  return true;
}
Example #13
static sp<IOMX> GetOMX() {
  if (sOMX.get() == NULL) {
    sOMX = new OMX;
  }
  return sOMX;
}
status_t PVPlayer::setVideoSurface(const sp<ISurface>& surface)
{
    LOGV("setVideoSurface(%p)", surface.get());
    mSurface = surface;
    return OK;
}
bool BaseObj::equals(sp<const BaseObj> obj) {
    // Early-out check to see if both BaseObjs are actually the same
    if (this == obj.get())
        return true;
    // Guard against a null sp before dereferencing it below.
    if (obj.get() == NULL)
        return false;
    return mID == obj->mID;
}
bool RenderProxy::pauseSurface(const sp<Surface>& surface) {
    SETUP_TASK(pauseSurface);
    args->context = mContext;
    args->surface = surface.get();
    return (bool) postAndWait(task);
}
Example #17
MERROR
MAIN_CLASS_NAME::
queryIOStreamInfoSet(
    NodeId_T const& nodeId,
    sp<IStreamInfoSet const>& rIn,
    sp<IStreamInfoSet const>& rOut
) const
{
    RWLock::AutoRLock _l(mRWLock);
    //
    if  ( mpNodeMap == 0 ) {
        MY_LOGE("frameNo:%u NULL node map", getFrameNo());
        rIn = 0;
        rOut = 0;
        return NO_INIT;
    }
    //
    sp<IPipelineFrameNodeMapControl::INode> pNode = mpNodeMap->getNodeFor(nodeId);
    if  ( pNode == 0 ) {
        MY_LOGE("frameNo:%u nodeId:%#"PRIxPTR" not found", getFrameNo(), nodeId);
        rIn = 0;
        rOut = 0;
        return NAME_NOT_FOUND;
    }
    //
    rIn = pNode->getIStreams();
    rOut= pNode->getOStreams();
    //
    if  ( rIn == 0 || rOut == 0 ) {
        MY_LOGE("frameNo:%u nodeId:%#"PRIxPTR" IStreams:%p OStreams:%p", getFrameNo(), nodeId, rIn.get(), rOut.get());
        return NO_INIT;
    }
    //
    return OK;
}
Example #18
sp<EIoBuffer> EIoBufferDecoder::decodeFully(sp<EIoBuffer>& in) {
	int contentLength = ctx->getContentLength();
	sp<EIoBuffer> decodedBuffer = ctx->getDecodedBuffer();

	int oldLimit = in->limit();

	// Retrieve fixed length content
	if (contentLength > -1) {
		if (decodedBuffer == null) {
			decodedBuffer = EIoBuffer::allocate(contentLength)->setAutoExpand(true);
		}

		// If not enough data to complete the decoding
		if (in->remaining() < contentLength) {
			int readBytes = in->remaining();
			decodedBuffer->put(in.get());
			ctx->setDecodedBuffer(decodedBuffer);
			ctx->setContentLength(contentLength - readBytes);
			return null;

		}

		int newLimit = in->position() + contentLength;
		in->limit(newLimit);
		decodedBuffer->put(in.get());
		decodedBuffer->flip();
		in->limit(oldLimit);
		ctx->reset();

		return decodedBuffer;
	}

	// Not a fixed length matching so try to find a delimiter match
	int oldPos = in->position();
	int matchCount = ctx->getMatchCount();
	sp<EIoBuffer> delimiter = ctx->getDelimiter();

	while (in->hasRemaining()) {
		byte b = in->get();
		if (delimiter->get(matchCount) == b) {
			matchCount++;
			if (matchCount == delimiter->limit()) {
				// Found a match.
				int pos = in->position();
				in->position(oldPos);

				in->limit(pos);

				if (decodedBuffer == null) {
					decodedBuffer = EIoBuffer::allocate(in->remaining())->setAutoExpand(true);
				}

				decodedBuffer->put(in.get());
				decodedBuffer->flip();

				in->limit(oldLimit);
				ctx->reset();

				return decodedBuffer;
			}
		} else {
			in->position(ES_MAX(0, in->position() - matchCount));
			matchCount = 0;
		}
	}

	// Copy remainder from buf.
	if (in->remaining() > 0) {
		in->position(oldPos);
		if (decodedBuffer == null) {
			// Guard: allocate on first use so the remainder copy cannot
			// dereference a null buffer when no delimiter match started yet.
			decodedBuffer = EIoBuffer::allocate(in->remaining())->setAutoExpand(true);
		}
		decodedBuffer->put(in.get());
		in->position(in->limit());
	}

	// Save decoding state
	ctx->setMatchCount(matchCount);
	ctx->setDecodedBuffer(decodedBuffer);

	return decodedBuffer;
}
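The delimiter branch above is a byte-at-a-time scan that rolls the read position back on a failed partial match. A standalone sketch of the same loop over std::string, assumed for illustration rather than the EIoBuffer API:

#include <string>
#include <cstddef>

// Returns the index one past the delimiter, or std::string::npos when no full
// match exists. On a mismatch mid-match the scan position rolls back by
// matchCount, mirroring decodeFully's in->position(position - matchCount).
static size_t findDelimiter(const std::string& in, const std::string& delim) {
    size_t matchCount = 0;
    for (size_t pos = 0; pos < in.size(); ++pos) {
        if (in[pos] == delim[matchCount]) {
            if (++matchCount == delim.size())
                return pos + 1;        // full delimiter matched
        } else {
            pos -= matchCount;         // restart just after the false start
            matchCount = 0;
        }
    }
    return std::string::npos;          // no complete match in this buffer
}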
status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
    LOGV("setPreviewSurface: %p", surface.get());
    mPreviewSurface = surface;

    return OK;
}
Example #20
int main(int argc, char** argv)
{
    DecodeInput *input;
    int32_t fd = -1;
    int32_t i = 0;
    int32_t ioctlRet = -1;
    YamiMediaCodec::CalcFps calcFps;

    renderMode = 3; // set default render mode to 3

    yamiTraceInit();
#if __ENABLE_V4L2_GLX__
    XInitThreads();
#endif

#if __ENABLE_V4L2_OPS__
    // FIXME, use libv4l2codec_hw.so instead
    if (!loadV4l2CodecDevice("libyami_v4l2.so")) {
        ERROR("fail to init v4l2codec device with __ENABLE_V4L2_OPS__\n");
        return -1;
    }
#endif

    if (!process_cmdline(argc, argv))
        return -1;

    if (!inputFileName) {
        ERROR("no input media file specified\n");
        return -1;
    }
    INFO("input file: %s, renderMode: %d", inputFileName, renderMode);

    if (!dumpOutputName)
        dumpOutputName = strdup ("./");

#if !__ENABLE_V4L2_GLX__
    switch (renderMode) {
    case 0:
        memoryType = VIDEO_DATA_MEMORY_TYPE_RAW_COPY;
        memoryTypeStr = typeStrRawData;
    break;
    case 3:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DRM_NAME;
        memoryTypeStr = typeStrDrmName;
    break;
    case 4:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DMA_BUF;
        memoryTypeStr = typeStrDmaBuf;
    break;
    default:
        ASSERT(0 && "unsupported render mode, -m [0,3,4] are supported");
    break;
    }
#endif

    input = DecodeInput::create(inputFileName);
    if (input==NULL) {
        ERROR("fail to init input stream\n");
        return -1;
    }

    renderFrameCount = 0;
    calcFps.setAnchor();
    // open device
    fd = SIMULATE_V4L2_OP(Open)("decoder", 0);
    ASSERT(fd!=-1);

#ifdef ANDROID
#elif __ENABLE_V4L2_GLX__
    x11Display = XOpenDisplay(NULL);
    ASSERT(x11Display);
    ioctlRet = SIMULATE_V4L2_OP(SetXDisplay)(fd, x11Display);
#endif
    // set output frame memory type
#if __ENABLE_V4L2_OPS__
    SIMULATE_V4L2_OP(SetParameter)(fd, "frame-memory-type", memoryTypeStr);
#elif !__ENABLE_V4L2_GLX__
    SIMULATE_V4L2_OP(FrameMemoryType)(fd, memoryType);
#endif

    // query hw capability
    struct v4l2_capability caps;
    memset(&caps, 0, sizeof(caps));
    caps.capabilities = V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYCAP, &caps);
    ASSERT(ioctlRet != -1);

    // set input/output data format
    uint32_t codecFormat = v4l2PixelFormatFromMime(input->getMimeType());
    if (!codecFormat) {
        ERROR("unsupported mimetype, %s", input->getMimeType());
        return -1;
    }

    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.pixelformat = codecFormat;
    format.fmt.pix_mp.num_planes = 1;
    format.fmt.pix_mp.plane_fmt[0].sizeimage = k_maxInputBufferSize;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // set preferred output format
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    uint8_t* data = (uint8_t*)input->getCodecData().data();
    uint32_t size = input->getCodecData().size();
    // Save the codec data as size followed by payload; format.fmt.raw_data is
    // __u8[200], so make sure there is enough space (>= sizeof(uint32_t) + size)
    // before copying.
    if (sizeof(format.fmt.raw_data) >= size + sizeof(uint32_t)) {
        memcpy(format.fmt.raw_data, &size, sizeof(uint32_t));
        memcpy(format.fmt.raw_data + sizeof(uint32_t), data, size);
    } else {
        ERROR("Not enough space to store codec data");
        return -1;
    }
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // input port starts as early as possible to decide output frame format
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    // setup input buffers
    struct v4l2_requestbuffers reqbufs;
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 2;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count>0);
    inputQueueCapacity = reqbufs.count;
    inputFrames.resize(inputQueueCapacity);

    for (i=0; i<inputQueueCapacity; i++) {
        struct v4l2_plane planes[k_inputPlaneCount];
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(buffer));
        memset(planes, 0, sizeof(planes));
        buffer.index = i;
        buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.m.planes = planes;
        buffer.length = k_inputPlaneCount;
        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYBUF, &buffer);
        ASSERT(ioctlRet != -1);

        // length and mem_offset should be filled by VIDIOC_QUERYBUF above
        void* address = SIMULATE_V4L2_OP(Mmap)(NULL,
                                      buffer.m.planes[0].length,
                                      PROT_READ | PROT_WRITE,
                                      MAP_SHARED, fd,
                                      buffer.m.planes[0].m.mem_offset);
        ASSERT(address);
        inputFrames[i] = static_cast<uint8_t*>(address);
        DEBUG("inputFrames[%d] = %p", i, inputFrames[i]);
    }

    // feed input frames first
    for (i=0; i<inputQueueCapacity; i++) {
        if (!feedOneInputFrame(input, fd, i)) {
            break;
        }
    }

    // query video resolution
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    while (1) {
        if (SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_G_FMT, &format) != 0) {
            // EINVAL means we haven't seen enough of the stream to decode the
            // format yet, so keep polling quietly; log other, unexpected errors.
            if (errno != EINVAL) {
                INFO("ioctl() failed: VIDIOC_G_FMT, haven't got video resolution yet, waiting");
            }
        } else {
            break;
        }
        usleep(50);
    }
    outputPlaneCount = format.fmt.pix_mp.num_planes;
    ASSERT(outputPlaneCount == 2);
    videoWidth = format.fmt.pix_mp.width;
    videoHeight = format.fmt.pix_mp.height;
    ASSERT(videoWidth && videoHeight);

#ifdef ANDROID
    __u32 pixelformat = format.fmt.pix_mp.pixelformat;
    if (!createNativeWindow(pixelformat)) {
        fprintf(stderr, "create native window error\n");
        return -1;
    }

    int minUndequeuedBuffs = 0;
    status_t err = mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffs);
    if (err != 0) {
        fprintf(stderr, "query native window min undequeued buffers error\n");
        return err;
    }
#endif

    // setup output buffers
    // Number of output buffers we need.
    struct v4l2_control ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_G_CTRL, &ctrl);
#ifndef ANDROID
    uint32_t minOutputFrameCount = ctrl.value + k_extraOutputFrameCount;
#else
    uint32_t minOutputFrameCount = ctrl.value + k_extraOutputFrameCount + minUndequeuedBuffs;
#endif

    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = minOutputFrameCount;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count>0);
    outputQueueCapacity = reqbufs.count;

#ifdef ANDROID
#elif __ENABLE_V4L2_GLX__
    x11Window = XCreateSimpleWindow(x11Display, DefaultRootWindow(x11Display)
        , 0, 0, videoWidth, videoHeight, 0, 0
        , WhitePixel(x11Display, 0));
    XMapWindow(x11Display, x11Window);
    pixmaps.resize(outputQueueCapacity);
    glxPixmaps.resize(outputQueueCapacity);
    textureIds.resize(outputQueueCapacity);

    if (!glxContext) {
        glxContext = glxInit(x11Display, x11Window);
    }
    ASSERT(glxContext);

    glGenTextures(outputQueueCapacity, &textureIds[0] );
    for (i=0; i<outputQueueCapacity; i++) {
        int ret = createPixmapForTexture(glxContext, textureIds[i], videoWidth, videoHeight, &pixmaps[i], &glxPixmaps[i]);
        DEBUG("textureIds[%d]: 0x%x, pixmaps[%d]=0x%lx, glxPixmaps[%d]: 0x%lx", i, textureIds[i], i, pixmaps[i], i, glxPixmaps[i]);
        ASSERT(ret == 0);
        ret = SIMULATE_V4L2_OP(UsePixmap)(fd, i, pixmaps[i]);
        ASSERT(ret == 0);
    }
#else
    if (IS_RAW_DATA()) {
        rawOutputFrames.resize(outputQueueCapacity);
        for (i=0; i<outputQueueCapacity; i++) {
            struct v4l2_plane planes[k_maxOutputPlaneCount];
            struct v4l2_buffer buffer;
            memset(&buffer, 0, sizeof(buffer));
            memset(planes, 0, sizeof(planes));
            buffer.index = i;
            buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            buffer.memory = V4L2_MEMORY_MMAP;
            buffer.m.planes = planes;
            buffer.length = outputPlaneCount;
            ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYBUF, &buffer);
            ASSERT(ioctlRet != -1);

            rawOutputFrames[i].width = format.fmt.pix_mp.width;
            rawOutputFrames[i].height = format.fmt.pix_mp.height;
            rawOutputFrames[i].fourcc = format.fmt.pix_mp.pixelformat;

            for (int j=0; j<outputPlaneCount; j++) {
                // length and mem_offset are filled by VIDIOC_QUERYBUF above
                void* address = SIMULATE_V4L2_OP(Mmap)(NULL,
                                              buffer.m.planes[j].length,
                                              PROT_READ | PROT_WRITE,
                                              MAP_SHARED, fd,
                                              buffer.m.planes[j].m.mem_offset);
                ASSERT(address);
                if (j == 0) {
                    rawOutputFrames[i].data = static_cast<uint8_t*>(address);
                    rawOutputFrames[i].offset[0] = 0;
                } else {
                    rawOutputFrames[i].offset[j] = static_cast<uint8_t*>(address) - rawOutputFrames[i].data;
                }

                rawOutputFrames[i].pitch[j] = format.fmt.pix_mp.plane_fmt[j].bytesperline;
            }
        }
    } else if (IS_DMA_BUF() || IS_DRM_NAME()) {
        // setup all textures and eglImages
        eglImages.resize(outputQueueCapacity);
        textureIds.resize(outputQueueCapacity);

        if (!eglContext)
            eglContext = eglInit(x11Display, x11Window, 0 /*VA_FOURCC_RGBA*/, IS_DMA_BUF());

        glGenTextures(outputQueueCapacity, &textureIds[0] );
        for (i=0; i<outputQueueCapacity; i++) {
             int ret = 0;
             ret = SIMULATE_V4L2_OP(UseEglImage)(fd, eglContext->eglContext.display, eglContext->eglContext.context, i, &eglImages[i]);
             ASSERT(ret == 0);

             GLenum target = GL_TEXTURE_2D;
             if (IS_DMA_BUF())
                 target = GL_TEXTURE_EXTERNAL_OES;
             glBindTexture(target, textureIds[i]);
             imageTargetTexture2D(target, eglImages[i]);

             glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
             glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
             DEBUG("textureIds[%d]: 0x%x, eglImages[%d]: 0x%p", i, textureIds[i], i, eglImages[i]);
        }
    }
#endif

#ifndef ANDROID
    // feed output frames first
    for (i=0; i<outputQueueCapacity; i++) {
        if (!takeOneOutputFrame(fd, i)) {
            ASSERT(0);
        }
    }
#else
    struct v4l2_buffer buffer;

    err = native_window_set_buffer_count(mNativeWindow.get(), outputQueueCapacity);
    if (err != 0) {
        fprintf(stderr, "native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
        return -1;
    }

    //queue buffs
    for (uint32_t i = 0; i < outputQueueCapacity; i++) {
        ANativeWindowBuffer* pbuf = NULL;
        memset(&buffer, 0, sizeof(buffer));

        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &pbuf);
        if (err != 0) {
            fprintf(stderr, "dequeueBuffer failed: %s (%d)\n", strerror(-err), -err);
            return -1;
        }

        buffer.m.userptr = (unsigned long)pbuf;
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buffer.index = i;

        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QBUF, &buffer);
        ASSERT(ioctlRet != -1);
        mWindBuff.push_back(pbuf);
    }

    for (uint32_t i = 0; i < minUndequeuedBuffs; i++) {
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_DQBUF, &buffer);
        ASSERT(ioctlRet != -1);

        err = mNativeWindow->cancelBuffer(mNativeWindow.get(), mWindBuff[buffer.index], -1);
        if (err) {
            fprintf(stderr, "queue empty window buffer error\n");
            return -1;
        }
    }
#endif

    // output port starts as late as possible to adopt user provide output buffer
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    bool event_pending=true; // try to get video resolution.
    int dqCountAfterEOS = 0;
    do {
        if (event_pending) {
            handleResolutionChange(fd);
        }

        takeOneOutputFrame(fd);
        if (!feedOneInputFrame(input, fd)) {
            if (stagingBufferInDevice == 0)
                break;
            dqCountAfterEOS++;
        }
        if (dqCountAfterEOS == inputQueueCapacity)  // input drain
            break;
    } while (SIMULATE_V4L2_OP(Poll)(fd, true, &event_pending) == 0);

    // drain output buffer
    int retry = 3;
    while (takeOneOutputFrame(fd) || (--retry)>0) { // output drain
        usleep(10000);
    }

    calcFps.fps(renderFrameCount);
    // SIMULATE_V4L2_OP(Munmap)(void* addr, size_t length)
    possibleWait(input->getMimeType());

    // release queued input/output buffer
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    // stop input port
    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

    // stop output port
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

#ifndef ANDROID
    if(textureIds.size())
        glDeleteTextures(textureIds.size(), &textureIds[0]);
    ASSERT(glGetError() == GL_NO_ERROR);
#endif

#ifdef ANDROID
    //TODO, some resources need to destroy?
#elif __ENABLE_V4L2_GLX__
    glxRelease(glxContext, &pixmaps[0], &glxPixmaps[0], pixmaps.size());
#else
    for (i=0; i<eglImages.size(); i++) {
        destroyImage(eglContext->eglContext.display, eglImages[i]);
    }
    /*
    There are still random failures inside Mesa here; the cause is unclear and it appears to be a Mesa bug:
    0  0x00007ffff079c343 in _mesa_symbol_table_dtor () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    1  0x00007ffff073c55d in glsl_symbol_table::~glsl_symbol_table() () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    2  0x00007ffff072a4d5 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    3  0x00007ffff072a4bd in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    4  0x00007ffff064b48f in _mesa_reference_shader () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    5  0x00007ffff0649397 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
    6  0x000000000040624d in releaseShader (program=0x77cd90) at ./egl/gles2_help.c:158
    7  eglRelease (context=0x615920) at ./egl/gles2_help.c:310
    8  0x0000000000402ca8 in main (argc=<optimized out>, argv=<optimized out>) at v4l2decode.cpp:531
    */
    if (eglContext)
        eglRelease(eglContext);
#endif

    // close device
    ioctlRet = SIMULATE_V4L2_OP(Close)(fd);
    ASSERT(ioctlRet != -1);

    if(input)
        delete input;

    if (outfp)
        fclose(outfp);

    if (dumpOutputName)
        free(dumpOutputName);

#if __ENABLE_V4L2_GLX__
    if (x11Display && x11Window)
        XDestroyWindow(x11Display, x11Window);
    if (x11Display)
        XCloseDisplay(x11Display);
#endif

    fprintf(stdout, "decode done\n");
    return 0;
}
EGLNativeWindowType QEglFSPandaHooks::createNativeWindowFramebuffer(const QSize &size, const QSurfaceFormat &)
{
    Q_UNUSED(size);
    ensureFramebufferNativeWindowCreated();
    return mFramebufferNativeWindow.get();
}
Example #22
static MUINT32 u4Capture_Cmd(int argc, char** argv)
{
    MUINT32 u4Transform;

    switch(g_u4Rot)
    {
        case 0:
            u4Transform = 0;
            break;
        case 1:
            u4Transform = eTransform_ROT_90;
            break;
        case 2:
            u4Transform = eTransform_ROT_180;
            break;
        case 3:
            u4Transform = eTransform_ROT_270;
            break;
        default:
            u4Transform = 0;
            break;
    }

    ISingleShot *pSingleShot = ISingleShot::createInstance(eShotMode_NormalShot, "testshot");
    //
    pSingleShot->init();
    //
    pSingleShot->enableDataMsg( g_u4EnableMsg );
    // set buffer
    //
    // register buffer
    allocateMem(g_u4RegisterDataMsg);
//    //
    if( g_u4RegisterDataMsg & ECamShot_BUF_TYPE_RAW )
        pSingleShot->registerImageBuffer(ECamShot_BUF_TYPE_RAW, g_ImgBufRaw.get());
    if( g_u4RegisterDataMsg & ECamShot_BUF_TYPE_YUV )
        pSingleShot->registerImageBuffer(ECamShot_BUF_TYPE_YUV, g_ImgBufYuv.get());
    if( g_u4RegisterDataMsg & ECamShot_BUF_TYPE_POSTVIEW )
        pSingleShot->registerImageBuffer(ECamShot_BUF_TYPE_POSTVIEW, g_ImgBufPostview.get());
    if( g_u4RegisterDataMsg & ECamShot_BUF_TYPE_JPEG )
        pSingleShot->registerImageBuffer(ECamShot_BUF_TYPE_JPEG, g_ImgBufJpeg.get());

//    pSingleShot->registerImgBufInfo(ECamShot_BUF_TYPE_YUV, g_rYuvBufInfo);
//    pSingleShot->registerImgBufInfo(ECamShot_BUF_TYPE_POSTVIEW, g_rPostViewBufInfo);
//    pSingleShot->registerImgBufInfo(ECamShot_BUF_TYPE_JPEG, g_rJpegBufInfo);


    // shot param
    ShotParam rShotParam(eImgFmt_YUY2,           //yuv format
                         g_u4Width,              //picture width
                         g_u4Height,             //picture height
                         u4Transform,            //picture transform
                         eImgFmt_YV12,           //postview format
                         800,                    //postview width
                         480,                    //postview height
                         0,                      //postview transform
                         100                     //zoom
                        );

    // jpeg param
    JpegParam rJpegParam(ThumbnailParam(160, 128, 100, MTRUE),
                         90,                     //Quality
                         MTRUE                   //isSOI
                        );

    // thumbnail param
    ThumbnailParam rThumbnailParam(160,          // thumbnail width
                                   128,          // thumbnail height
                                   100,          // quality
                                   MTRUE         // isSOI
                                  );

    // sensor param
    SensorParam rSensorParam(
            g_sensorIdx,                         //open ID
            g_u4Mode == 0 ?
            SENSOR_SCENARIO_ID_NORMAL_PREVIEW :
            SENSOR_SCENARIO_ID_NORMAL_CAPTURE,   //Scenario
            10,                                  //bit depth
            MFALSE,                              //bypass delay
            MFALSE,                              //bypass scenario
            u4RawType                            //rawType
            );
    // update sensor's size
    if( g_u4Mode == 0 ) // preview
    {
        g_u4SensorWidth  = g_pSensorInfo[g_sensorIdx].previewWidth;
        g_u4SensorHeight = g_pSensorInfo[g_sensorIdx].previewHeight;
    }
    else if( g_u4Mode == 1 )//capture
    {
        g_u4SensorWidth  = g_pSensorInfo[g_sensorIdx].captureWidth;
        g_u4SensorHeight = g_pSensorInfo[g_sensorIdx].captureHeight;
    }
    //
    pSingleShot->setCallbacks(fgCamShotNotifyCb, fgCamShotDataCb, NULL);
    //
    pSingleShot->setShotParam(rShotParam);
    //
    pSingleShot->setJpegParam(rJpegParam);
    //
    //
    u4CapCnt = 0;
    for (MUINT32 i = 0 ; i < g_u4ShotCnt; i++)
    {
        printf("startOne count(0x%x)+\n", i);
        pSingleShot->startOne(rSensorParam);
        printf("startOne count(0x%x)-\n", i);
        u4CapCnt++;
    }
    //
    pSingleShot->uninit();
    //
    pSingleShot->destroyInstance();

    freeMem();

    return 0;
}
status_t BufferQueueConsumer::releaseBuffer(int slot, uint64_t frameNumber,
        const sp<Fence>& releaseFence, EGLDisplay eglDisplay,
        EGLSyncKHR eglFence) {
    ATRACE_CALL();
    ATRACE_BUFFER_INDEX(slot);

    if (slot < 0 || slot >= BufferQueueDefs::NUM_BUFFER_SLOTS ||
            releaseFence == NULL) {
        BQ_LOGE("releaseBuffer: slot %d out of range or fence %p NULL", slot,
                releaseFence.get());
        return BAD_VALUE;
    }

    sp<IProducerListener> listener;
    { // Autolock scope
        Mutex::Autolock lock(mCore->mMutex);

        // If the frame number has changed because the buffer has been reallocated,
        // we can ignore this releaseBuffer for the old buffer
        if (frameNumber != mSlots[slot].mFrameNumber) {
            return STALE_BUFFER_SLOT;
        }

        // Make sure this buffer hasn't been queued while acquired by the consumer
        BufferQueueCore::Fifo::iterator current(mCore->mQueue.begin());
        while (current != mCore->mQueue.end()) {
            if (current->mSlot == slot) {
                BQ_LOGE("releaseBuffer: buffer slot %d pending release is "
                        "currently queued", slot);
                return BAD_VALUE;
            }
            ++current;
        }

        if (mSlots[slot].mBufferState == BufferSlot::ACQUIRED) {
            mSlots[slot].mEglDisplay = eglDisplay;
            mSlots[slot].mEglFence = eglFence;
            mSlots[slot].mFence = releaseFence;
            mSlots[slot].mBufferState = BufferSlot::FREE;
            mCore->mFreeBuffers.push_back(slot);
            listener = mCore->mConnectedProducerListener;
            BQ_LOGV("releaseBuffer: releasing slot %d", slot);
        } else if (mSlots[slot].mNeedsCleanupOnRelease) {
            BQ_LOGV("releaseBuffer: releasing a stale buffer slot %d "
                    "(state = %d)", slot, mSlots[slot].mBufferState);
            mSlots[slot].mNeedsCleanupOnRelease = false;
            return STALE_BUFFER_SLOT;
        } else {
            BQ_LOGE("releaseBuffer: attempted to release buffer slot %d "
                    "but its state was %d", slot, mSlots[slot].mBufferState);
            return BAD_VALUE;
        }

        mCore->mDequeueCondition.broadcast();
        mCore->validateConsistencyLocked();
    } // Autolock scope

    // Call back without lock held
    if (listener != NULL) {
        listener->onBufferReleased();
    }

    return NO_ERROR;
}
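A hedged sketch of the consumer-side cycle this method participates in, assuming direct access to a BufferQueueConsumer named consumer (error handling elided): the frame number saved from acquire is passed back on release, so a release against a reallocated slot is detected as stale by the check above.

// Sketch only: typical acquire/use/release pairing.
BufferItem item;
if (consumer->acquireBuffer(&item, 0 /*expectedPresent*/) == NO_ERROR) {
    // ... read from item.mGraphicBuffer ...
    // Fence::NO_FENCE: any CPU work on the buffer is already finished here.
    consumer->releaseBuffer(item.mSlot, item.mFrameNumber, Fence::NO_FENCE,
                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR);
}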
 BpListenReceiver(const sp<IBinder>& impl)
     : BpInterface<IListenReceiver>(impl)
 {
    ALOGD("BnListenReceiver::constructor - impl=%p, this=%p", impl.get(), this);
 }
bool OmxDecoder::Init() {
  //register sniffers, if they are not registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource = new MediaStreamSource(mPluginHost, mDecoder);
  if (dataSource->initCheck()) {
    return false;
  }

  mPluginHost->SetMetaDataReadMode(mDecoder);

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == NULL) {
    return false;
  }

  ssize_t audioTrackIndex = -1;
  ssize_t videoTrackIndex = -1;
  const char *audioMime = NULL;
  const char *videoMime = NULL;

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
      videoTrackIndex = i;
      videoMime = mime;
    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
      audioTrackIndex = i;
      audioMime = mime;
    }
  }

  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
    return false;
  }

  mPluginHost->SetPlaybackReadMode(mDecoder);

  int64_t totalDurationUs = 0;

#ifdef MOZ_WIDGET_GONK
  sp<IOMX> omx = GetOMX();
#else
  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect. We may need to implement the connect functionality
  // ourselves if this proves to be an issue.
  if (mClient.connect() != OK) {
    LOG("OMXClient failed to connect");
  }
  sp<IOMX> omx = mClient.interface();
#endif

  sp<MediaSource> videoTrack;
  sp<MediaSource> videoSource;
  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != NULL) {
    uint32_t flags = GetVideoCreationFlags(mPluginHost);
    videoSource = OMXCodec::Create(omx,
                                   videoTrack->getFormat(),
                                   false, // decoder
                                   videoTrack,
                                   NULL,
                                   flags);
    if (videoSource == NULL) {
      LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
      return false;
    }

    status_t status = videoSource->start();
    if (status != OK) {
      LOG("videoSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("video duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  sp<MediaSource> audioTrack;
  sp<MediaSource> audioSource;
  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != NULL)
  {
    if (!strcasecmp(audioMime, "audio/raw")) {
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(omx,
                                     audioTrack->getFormat(),
                                     false, // decoder
                                     audioTrack);
    }

    if (audioSource == NULL) {
      LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
      return false;
    }

    status_t status = audioSource->start();
    if (status != OK) {
      LOG("audioSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("audio duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat())
    return false;

  // To reliably get the channel and sample rate data we need to read from the
  // audio source until we get a INFO_FORMAT_CHANGE status
  if (mAudioSource.get()) {
    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
      sp<MetaData> meta = mAudioSource->getFormat();
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        return false;
      }
      mAudioMetadataRead = true;

      if (mAudioChannels < 0) {
        LOG("audio channel count %d must be nonnegative", mAudioChannels);
        return false;
      }

      if (mAudioSampleRate < 0) {
        LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
        return false;
      }
    }
    else if (!SetAudioFormat()) {
        return false;
    }
  }
  return true;
}
void
FakeSurfaceComposer::captureScreenImp(const sp<IGraphicBufferProducer>& producer,
                                      uint32_t reqWidth,
                                      uint32_t reqHeight,
                                      const sp<GraphicProducerWrapper>& wrapper)
{
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(wrapper.get());

    RefPtr<nsScreenGonk> screen = nsScreenManagerGonk::GetPrimaryScreen();

    // get screen geometry
    nsIntRect screenBounds = screen->GetNaturalBounds().ToUnknownRect();
    const uint32_t hw_w = screenBounds.width;
    const uint32_t hw_h = screenBounds.height;

    if (reqWidth > hw_w || reqHeight > hw_h) {
        ALOGE("size mismatch (%d, %d) > (%d, %d)",
                reqWidth, reqHeight, hw_w, hw_h);
        static_cast<GraphicProducerWrapper*>(producer->asBinder().get())->exit(BAD_VALUE);
        return;
    }

    reqWidth  = (!reqWidth)  ? hw_w : reqWidth;
    reqHeight = (!reqHeight) ? hw_h : reqHeight;

    nsScreenGonk* screenPtr = screen.forget().take();
    nsCOMPtr<nsIRunnable> runnable =
        NS_NewRunnableFunction([screenPtr, reqWidth, reqHeight, producer, wrapper]() {
            // create a surface (because we're a producer, and we need to
            // dequeue/queue a buffer)
            sp<Surface> sur = new Surface(producer);
            ANativeWindow* window = sur.get();

            if (native_window_api_connect(window, NATIVE_WINDOW_API_EGL) != NO_ERROR) {
                static_cast<GraphicProducerWrapper*>(producer->asBinder().get())->exit(BAD_VALUE);
                NS_ReleaseOnMainThread(screenPtr);
                return;
            }
            uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
                             GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;

            int err = 0;
            err = native_window_set_buffers_dimensions(window, reqWidth, reqHeight);
            err |= native_window_set_scaling_mode(window, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
            err |= native_window_set_buffers_format(window, HAL_PIXEL_FORMAT_RGBA_8888);
            err |= native_window_set_usage(window, usage);

            status_t result = NO_ERROR;
            if (err == NO_ERROR) {
                ANativeWindowBuffer* buffer;
                result = native_window_dequeue_buffer_and_wait(window,  &buffer);
                if (result == NO_ERROR) {
                    nsresult rv = screenPtr->MakeSnapshot(buffer);
                    if (rv != NS_OK) {
                        result = INVALID_OPERATION;
                    }
                    window->queueBuffer(window, buffer, -1);
                }
            } else {
                result = BAD_VALUE;
            }
            native_window_api_disconnect(window, NATIVE_WINDOW_API_EGL);
            static_cast<GraphicProducerWrapper*>(producer->asBinder().get())->exit(result);
            NS_ReleaseOnMainThread(screenPtr);
        });

    mozilla::layers::CompositorParent::CompositorLoop()->PostTask(
        FROM_HERE, new RunnableCallTask(runnable));
}
static sp<IOMX> GetOMX() {
  if (sOMX.get() == nullptr) {
    sOMX = new OMX;
  }
  return sOMX;
}
BaseObj::BaseObj(void *id, sp<RS> rs) {
    mRS = rs.get();
    mID = id;
}
static bool isValid(const sp<Fence>& fence) {
    return fence.get() && fence->isValid();
}
Example #30
void GuiExtPoolItem::ConsumerSlot::dump(String8& result) const
{
    result.appendFormat("[%02d] pid=%d, token=%p, observer=%p\n", idx, pid, token.get(), observer.get());
}