void CameraControl::postData(
		int32_t msg_type,
		const android::sp<android::IMemory>& data,
		camera_frame_metadata_t* metadata)
{
	REPORT_FUNCTION();

	if (!listener)
		return;

	switch (msg_type) {
	case CAMERA_MSG_RAW_IMAGE:
		if (listener->on_data_raw_image_cb)
			listener->on_data_raw_image_cb(data->pointer(), data->size(), listener->context);
		break;
	case CAMERA_MSG_COMPRESSED_IMAGE:
		if (listener->on_data_compressed_image_cb)
			listener->on_data_compressed_image_cb(data->pointer(), data->size(), listener->context);
		break;
	default:
		break;
	}

	camera->releaseRecordingFrame(data);
}
Example #2
ANTStatus ant_tx_write(ANT_U8 *pucTxMessage, ANT_U8 ucMessageLength)
{
   AntPacket data;
   ANT_U8  packet_type;
   ALOGI("%s: start", __func__);
   packet_type = *pucTxMessage;
   ALOGV("%s: proto type  :%d", __func__, packet_type);
   if (anthci != nullptr)
   {
      data.setToExternal(pucTxMessage+1, ucMessageLength-1);
      if (packet_type == ANT_DATA_TYPE_PACKET)
      {
         auto hidl_daemon_status = anthci->sendAntData(data);
         if (!hidl_daemon_status.isOk())
         {
            ALOGE("%s:sendAntData failed,HIDL dead", __func__);
            return -1;
         }
      } else {
         auto hidl_daemon_status = anthci->sendAntControl(data);
         if (!hidl_daemon_status.isOk())
         {
            ALOGE("%s:sendAntControl failed,HIDL dead", __func__);
            return -1;
         }
      }
   } else {
      ALOGE("%s: antHci is NULL", __func__);
      return -1;
   }
   ALOGI("%s: exit", __func__);
   return ucMessageLength;
}
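A minimal usage sketch for ant_tx_write (not from the original source): the first byte of the buffer selects the HIDL path (data vs. control) and the remaining bytes are the payload. The message bytes below are purely illustrative.

// Hypothetical call site: byte 0 is the packet type, bytes 1..n-1 the payload.
ANT_U8 msg[] = { ANT_DATA_TYPE_PACKET, 0x4E, 0x00, 0x01 }; // illustrative bytes
ANTStatus status = ant_tx_write(msg, sizeof(msg));
if (status < 0) {
   // transport failed (HIDL dead or anthci not initialized)
}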
Example #3
bool
TextureHostOGL::SetReleaseFence(const android::sp<android::Fence>& aReleaseFence)
{
  if (!aReleaseFence.get() || !aReleaseFence->isValid()) {
    // HWC might not provide Fence.
    // In this case, HWC implicitly handles buffer's fence.
    return false;
  }

  if (!mReleaseFence.get()) {
    mReleaseFence = aReleaseFence;
  } else {
    android::sp<android::Fence> mergedFence = android::Fence::merge(
                  android::String8::format("TextureHostOGL"),
                  mReleaseFence, aReleaseFence);
    if (!mergedFence.get()) {
      // synchronization is broken, the best we can do is hope fences
      // signal in order so the new fence will act like a union.
      // This error handling is same as android::ConsumerBase does.
      mReleaseFence = aReleaseFence;
      return false;
    }
    mReleaseFence = mergedFence;
  }
  return true;
}
Example #4
MERROR
PipelineModelBase::
submitRequestFrame(
    android::sp<IPipelineFrame> pFrame
)
{
    sp<IPipelineNodeMap const> pPipelineNodeMap = pFrame->getPipelineNodeMap();
    if  ( pPipelineNodeMap == 0 || pPipelineNodeMap->isEmpty() ) {
        MY_LOGE("[frameNo:%d] Bad PipelineNodeMap:%p", pFrame->getFrameNo(), pPipelineNodeMap.get());
        return DEAD_OBJECT;
    }
    //
    IPipelineDAG::NodeObj_T const nodeObj = pFrame->getPipelineDAG().getRootNode();
    sp<IPipelineNode> pNode = pPipelineNodeMap->nodeAt(nodeObj.val);
    if  ( pNode == 0 ) {
        MY_LOGE("[frameNo:%d] Bad root node", pFrame->getFrameNo());
        return DEAD_OBJECT;
    }

    MERROR err = OK;
    RWLock::AutoRLock _l(mflushLock);
    if(mInFlush == MTRUE) {
        err = pNode->flush(pFrame);
    } else {
        err = pNode->queue(pFrame);
    }

    return err;
}
Example #5
int OpenSLMediaPlayerVisualizerJNIBinder::unbind(const android::sp<OpenSLMediaPlayerVisualizer> &visualizer) noexcept
{
    if (!visualizer.get())
        return OSLMP_RESULT_ILLEGAL_ARGUMENT;

    return visualizer->setInternalPeriodicCaptureThreadEventListener(nullptr, 0, false, false);
}
Example #6
 ~VisualizerJniContextHolder()
 {
     if (binder.get()) {
         binder->unbind(visualizer);
     }
     binder.clear();
     visualizer.clear();
 }
Example #7
int OpenSLMediaPlayerHQVisualizerJNIBinder::bind(const android::sp<OpenSLMediaPlayerHQVisualizer> &visualizer,
                                                 uint32_t rate, bool waveform, bool fft) noexcept
{
    if (!visualizer.get())
        return OSLMP_RESULT_ILLEGAL_ARGUMENT;

    return visualizer->setInternalPeriodicCaptureThreadEventListener(this, rate, waveform, fft);
}
Example #8
/**
 * pre-condition: gp != 0
 */
XAresult android_Player_setPlayState(const android::sp<android::GenericPlayer> &gp,
        SLuint32 playState,
        AndroidObjectState* pObjState)
{
    XAresult result = XA_RESULT_SUCCESS;
    AndroidObjectState objState = *pObjState;

    switch (playState) {
     case SL_PLAYSTATE_STOPPED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_STOPPED");
         gp->stop();
         }
         break;
     case SL_PLAYSTATE_PAUSED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PAUSED");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             break;
         case ANDROID_PREPARING:
             break;
         case ANDROID_READY:
             gp->pause();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     case SL_PLAYSTATE_PLAYING: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PLAYING");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             // intended fall through
         case ANDROID_PREPARING:
             // intended fall through
         case ANDROID_READY:
             gp->play();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     default:
         // checked by caller, should not happen
         break;
     }

    return result;
}
Example #9
    android::status_t setVideoSurfaceTexture(const android::sp<android::SurfaceTexture> &surfaceTexture)
    {
        REPORT_FUNCTION();

        surfaceTexture->getBufferQueue()->setBufferCount(5);
        texture = surfaceTexture;
        texture->setFrameAvailableListener(frame_listener);

        return MediaPlayer::setVideoSurfaceTexture(surfaceTexture->getBufferQueue());
    }
Example #10
static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<android::SurfaceTexture>& surfaceTexture)
{
    android::SurfaceTexture* const p = (android::SurfaceTexture*)thiz->mSurfaceTexture;
    if (surfaceTexture.get()) {
        surfaceTexture->incStrong(thiz);
    }
    if (p) {
        p->decStrong(thiz);
    }
    thiz->mSurfaceTexture = (Int32)surfaceTexture.get();
}
Example #11
static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<GLConsumer>& surfaceTexture)
{
    GLConsumer* const p = (GLConsumer*)thiz->mSurfaceTexture;
    if (surfaceTexture.get()) {
        surfaceTexture->incStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    thiz->mSurfaceTexture = (Int64)surfaceTexture.get();
}
Example #12
bool AudioPlaybackLocal::doPlaybackOrRecord(android::sp<Buffer>& buffer)
{
    if (buffer->amountToHandle() < (size_t)mSizes) {
        mSizes = buffer->amountToHandle();
    }
    if (pcm_write(mPcmHandle, buffer->getUnhanledData(), mSizes)) {
        LOGE("AudioPlaybackLocal error %s", pcm_get_error(mPcmHandle));
        return false;
    }
    buffer->increaseHandled(mSizes);
    LOGV("AudioPlaybackLocal::doPlaybackOrRecord %d", buffer->amountHandled());
    return true;
}
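doPlaybackOrRecord() consumes at most mSizes bytes per call, so a caller presumably loops until the buffer is drained; a sketch assuming amountToHandle() reaches zero once everything has been written (the playback instance name is hypothetical):

// Hypothetical driver loop: push the whole buffer through in mSizes-sized chunks.
while (buffer->amountToHandle() > 0) {
    if (!playback.doPlaybackOrRecord(buffer)) {
        break; // pcm_write failed; the error was already logged
    }
}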
Example #13
static void SurfaceTexture_setFrameAvailableListener(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ android::sp<GLConsumer::FrameAvailableListener> listener)
{
    GLConsumer::FrameAvailableListener* const p = (GLConsumer::FrameAvailableListener*)thiz->mFrameAvailableListener;
    if (listener.get()) {
        listener->incStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    thiz->mFrameAvailableListener = (Int64)listener.get();
}
Example #14
static void SurfaceTexture_setProducer(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<IGraphicBufferProducer>& producer)
{
    IGraphicBufferProducer* const p =
        (IGraphicBufferProducer*)thiz->mProducer;
    if (producer.get()) {
        producer->incStrong((void*)SurfaceTexture_setProducer);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setProducer);
    }
    thiz->mProducer = (Int64)producer.get();
}
Example #15
/**
 * pre-condition:
 *    ap != NULL
 *    for media players:
 *      ap->mAPlayer != 0
 *      ap->mTrackPlayer->mAudioTrack == 0
 *    for buffer queue players:
 *      ap->mAPlayer == 0
 *      ap->mTrackPlayer->mAudioTrack != 0 is optional; if no track yet then the setting is deferred
 */
android::status_t android_fxSend_attach(CAudioPlayer* ap, bool attach,
        const android::sp<android::AudioEffect>& pFx, SLmillibel sendLevel) {

    if (pFx == 0) {
        return android::INVALID_OPERATION;
    }

    // There are 3 cases:
    //  mAPlayer != 0 && mAudioTrack == 0 means playing decoded audio
    //  mAPlayer == 0 && mAudioTrack != 0 means playing PCM audio
    //  mAPlayer == 0 && mAudioTrack == 0 means player not fully configured yet
    // The asserts document and verify this.
    if (ap->mAPlayer != 0) {
        assert(ap->mTrackPlayer->mAudioTrack == 0);
        if (attach) {
            ap->mAPlayer->attachAuxEffect(pFx->id());
            ap->mAPlayer->setAuxEffectSendLevel( sles_to_android_amplification(sendLevel) );
        } else {
            ap->mAPlayer->attachAuxEffect(0);
        }
        return android::NO_ERROR;
    }

    if (ap->mTrackPlayer->mAudioTrack == 0) {
        // the player doesn't have an AudioTrack at the moment, so store this info to use it
        // when the AudioTrack becomes available
        if (attach) {
            ap->mAuxEffect = pFx;
        } else {
            ap->mAuxEffect.clear();
        }
        // we keep track of the send level, independently of the current audio player level
        ap->mAuxSendLevel = sendLevel - ap->mVolume.mLevel;
        return android::NO_ERROR;
    }

    if (attach) {
        android::status_t status = ap->mTrackPlayer->mAudioTrack->attachAuxEffect(pFx->id());
        //SL_LOGV("attachAuxEffect(%d) returned %d", pFx->id(), status);
        if (android::NO_ERROR == status) {
            status =
                ap->mTrackPlayer->mAudioTrack->setAuxEffectSendLevel(
                        sles_to_android_amplification(sendLevel) );
        }
        return status;
    } else {
        return ap->mTrackPlayer->mAudioTrack->attachAuxEffect(0);
    }
}
Example #16
    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::ISurfaceTexture>& surfaceTexture,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags)
    {
        if (surfaceTexture == NULL) {
            return;
        }

        android::sp<android::Surface> surface(new android::Surface(surfaceTexture));
        if (surface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                surfaceTexture.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        surfaceObj->SetSurface((Handle32)surface.get());

        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
Example #17
    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::IGraphicBufferProducer>& bufferProducer,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags,
        /* [in] */ uint32_t session)
    {
        if (bufferProducer == NULL) {
            return;
        }

        android::sp<android::Surface> surface(new android::Surface(bufferProducer));
        if (surface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                bufferProducer.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags, session);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
Example #18
int TestFrameworkClient::tf_write(const char *buf)
{
    int ret = 0, status = 0;
    bool send = false;

    status = tf_logging_status();

    switch(status) {
    case TF_LOGCAT:
        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG, buf);
        break;
    case TF_TESTFRAMEWORK:
    case TF_ALL:
        send = true;
        break;
    case TF_DISABLE:
        break;
    }

    if (send) {
#ifdef TF_FEATURE_MSGS_THROUGH_BINDER
        mTfDispacther->DispatchMsg(buf);
#else
        ret = TfWrite(buf);
#endif
    }

    return ret;
}
Example #19
    void doDownload() {
        android::sp<Buffer> buffer = AudioSignalFactory::generateZeroSound(AudioHardware::E2BPS, 2,
                false);
        uint32_t prepareSend[] = {
                U32_ENDIAN_SWAP(AudioProtocol::ECmdDownload),
                U32_ENDIAN_SWAP(8),
                U32_ENDIAN_SWAP(0), //id
                U32_ENDIAN_SWAP(0)
        };
        uint32_t prepareReply[] = {
                U32_ENDIAN_SWAP((AudioProtocol::ECmdDownload & 0xffff) | 0x43210000),
                0,
                0
        };
        LOGD("reply 0x%x", prepareReply[0]);

        mTestSocket.setSendExpectation((char*)prepareSend, sizeof(prepareSend));
        // the reply arrives after the send, but set the read expectation for it up front
        mTestSocket.setReadExpectation((char*)prepareReply, sizeof(prepareReply));

        int id = -1;
        android::String8 name("1");
        ASSERT_TRUE(mRemoteAudio->downloadData(name, buffer, id));
        ASSERT_TRUE(id >= 0);
    }
Example #20
int TestFrameworkClient::tf_logging_status() {
    int status = TF_DISABLE, time_now = 0;
    bool timeout = false;

    // probe frequency is set to 1 or less, so don't probe;
    // whatever params were set initially will be used
    if (mProbeFreq <= 1) {
        return mLogType;
    }

    time_now = ns2ms(systemTime());
    timeout = (time_now - tfTs >= mProbeFreq);

    // even though binder doesn't incur much overhead, don't use it every
    // time; fetch from the tf service only once every few milliseconds
    if (timeout) {
        if (TfIsServiceRunning()) {
            BpTestFramework::Connect(mTfDispacther);

            if (mTfDispacther != 0) {
                mTfDispacther->DispatchGetInfo(mLogType, mEventType,
                                              mOpenInterval, mClosedInterval);

                if (!mEventType) {
                    mTfDispacther->ConnectReset();
                }

                if ((mLogType != TF_DISABLE) && mTfDispacther->IsConnectedAgain()) {
                    TfGetPropertyFilters();
                }
            }
        }
        else {
            TfUpdate();
            if (mLogType != TF_DISABLE) {
                TfGetPropertyFilters();
            }
        }
        status = mLogType;
        tfTs = time_now;
    }
    else {
        status = mLogType;
    }

    return status;
}
Example #21
//-----------------------------------------------------------------------------
android::status_t android_prev_setPreset(const android::sp<android::AudioEffect>& pFx,
        uint16_t preset) {
    android::status_t status = android_fx_setParam(pFx, REVERB_PARAM_PRESET,
            PRESETREVERB_PARAM_SIZE_MAX, &preset, sizeof(uint16_t));
    // enable the effect if the preset is different from SL_REVERBPRESET_NONE
    pFx->setEnabled(SL_REVERBPRESET_NONE != preset);
    return status;
}
Example #22
 static android::sp<android::ISurface> getISurface (const android::sp<android::Surface>& surface) {
     if (surface != NULL) {
         return surface->getISurface ();
     } else {
         ERROR ("surface == NULL!");
         return NULL;
     }
 }
Example #23
void handle_death_recipient()
{
   ALOGI("%s", __func__);
   auto hidl_death_unlink = anthci->unlinkToDeath(ANTHidlDeathRecipient);
   ant_hci.state = ANT_RADIO_RESETTING;
   ant_rx_clear();
   anthci = nullptr;
}
Example #24
sp<PipelineModelBase::HalImageStreamBufferPoolT>
PipelineModelBase::
replaceHalStreamBufferPoolLocked(
    android::sp<IImageStreamInfo> pStreamInfo,
    android::sp<HalImageStreamBufferPoolT> pPool
)
{
    if  ( pStreamInfo == 0 ) {
        MY_LOGE("NULL IImageStreamInfo");
        return NULL;
    }
    //
    if(mHalImageStreamBufferPoolMap.indexOfKey(pStreamInfo->getStreamId()) >= 0)
        mHalImageStreamBufferPoolMap.replaceValueFor(pStreamInfo->getStreamId(), pPool);
    else
        mHalImageStreamBufferPoolMap.add(pStreamInfo->getStreamId(), pPool);
    return pPool;
}
Example #25
android::sp<android::IBinder> get_service_manager()
{
    static android::sp<android::IBinder> binder;
    if (binder.get() != NULL) {
        return binder;
    }
    ALOGD("elastos servicemanager try getting...\n");
    android::sp<android::IServiceManager> sm = android::defaultServiceManager();
    do {
        binder = sm->getService(android::String16(ELASTOS_SERVICEMGR_NAME));
        if (binder != 0) {
            break;
        }
        usleep(500000);
    } while (true);
    ALOGD("elastos servicemanager getted.\n");
    return binder;
}
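A possible caller of the helper above (a sketch, not from the original source); pingBinder() is the standard IBinder liveness check:

// Sketch: block until the Elastos service manager is up, then verify liveness.
android::sp<android::IBinder> smBinder = get_service_manager();
if (smBinder->pingBinder() != android::NO_ERROR) {
    ALOGW("elastos servicemanager not responding");
}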
Example #26
ssize_t
UsersManager::
enqueUserGraph(
    android::sp<IUserGraph> pUserGraph
)
{
    RWLock::AutoWLock _l(mRWLock);
    //
    return enqueUserGraph_(pUserGraph->getGroupIndex());
}
Example #27
bool hci_initialize()
{
   ALOGI("%s", __func__);

   anthci = IAntHci::getService();

   if(anthci != nullptr)
   {
      ant_hci.state = ANT_RADIO_ENABLING;
      ant_hci.rx_processing = false;
      android::sp<IAntHciCallbacks> callbacks = new AntHciCallbacks();
      anthci->initialize(callbacks);
      ALOGV("%s: exit", __func__);
      auto hidl_death_link = anthci->linkToDeath(ANTHidlDeathRecipient, 0);
      return true;
   } else {
      return false;
   }
}
Example #28
 android::sp<IDemo> IDemo::asInterface(const android::sp<android::IBinder>& obj) {
     android::sp<IDemo> intr;
     if (obj != NULL) {
         intr = static_cast<IDemo*>(obj->queryLocalInterface(IDemo::descriptor).get());
         if (intr == NULL) {
             intr = new BpDemo(obj);
         }
     }
     return intr;
 }
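For context, a sketch of how asInterface() is typically reached from a client; this is the standard Binder lookup pattern, and the service name "demo" is hypothetical:

// Hypothetical client-side lookup: resolve the remote IBinder, then wrap it.
android::sp<android::IServiceManager> sm = android::defaultServiceManager();
android::sp<android::IBinder> binder = sm->getService(android::String16("demo"));
android::sp<IDemo> demo = IDemo::asInterface(binder); // BpDemo proxy if remote
if (demo != NULL) {
    // IDemo calls now marshal through the proxy's transact()
}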
Example #29
//-----------------------------------------------------------------------------
bool android_fx_initEffectObj(audio_session_t sessionId, android::sp<android::AudioEffect>& effect,
        const effect_uuid_t *type) {
    //SL_LOGV("android_fx_initEffectObj on session %d", sessionId);

    effect = new android::AudioEffect(type, android::String16(), EFFECT_UUID_NULL,
            0,// priority
            0,// effect callback
            0,// callback data
            sessionId,// session ID
            0 );// output

    android::status_t status = effect->initCheck();
    if (android::NO_ERROR != status) {
        effect.clear();
        SL_LOGE("Effect initCheck() returned %d", status);
        return false;
    }

    return true;
}
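A sketch of a call site for android_fx_initEffectObj(), assuming a session id and an effect type UUID are already at hand (mySessionId and myEffectType are hypothetical names):

// Hypothetical usage: create an effect on a session, bail out on failure.
android::sp<android::AudioEffect> fx;
if (!android_fx_initEffectObj(mySessionId, fx, &myEffectType)) {
    // initCheck() failed; the helper has already cleared fx
    return SL_RESULT_RESOURCE_ERROR;
}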
Example #30
MERROR
PipelineModelBase::
acquireHalStreamBuffer(
    android::sp<IImageStreamInfo> const pStreamInfo,
    android::sp<HalImageStreamBuffer>& rpStreamBuffer
)   const
{
    RWLock::AutoRLock _l(mRWLock);
    //
    StreamId_T const streamId = pStreamInfo->getStreamId();
    sp<HalImageStreamBufferPoolT> pPool = mHalImageStreamBufferPoolMap.valueFor(streamId);
    MY_LOGE_IF(pPool == 0, "NULL HalImageStreamBufferPool - stream:%#" PRIxPTR "(%s)", streamId, pStreamInfo->getStreamName());
    MERROR err = pPool == 0 ? UNKNOWN_ERROR : pPool->acquireFromPool(__FUNCTION__, rpStreamBuffer, ::s2ns(10));
    MY_LOGA_IF(
        OK!=err || rpStreamBuffer==0,
        "[acquireFromPool] err:%d(%s) pStreamBuffer:%p stream:%#"PRIxPTR"(%s)",
        err, ::strerror(-err), rpStreamBuffer.get(), streamId, pStreamInfo->getStreamName()
    );
    return err;
}