int Fpc1020Sensor::FingerprintThread::waitForTouchDown()
{
    fingerprint_get_touch_rsp_t *resp =
            (fingerprint_get_touch_rsp_t *) mSensor->mQseecom.getReceiveBuffer();
    int ret;

    // First wait for the sensor to become free ...
    do {
        ret = mSensor->sendCommand(CLIENT_CMD_WAIT_FOR_TOUCH_UP);
        if (exitPending()) {
            ret = -EINTR;
        }
    } while (ret == 0 && resp->result != 0);

    if (ret) {
        ALOGV("Waiting for touch down failed: %d", ret);
        return ret;
    }

    // ... then wait for the next touch
    do {
        ret = mSensor->scanForTouchDown();
        if (exitPending()) {
            ret = -EINTR;
        }
    } while (ret == 0 && resp->result != 0);

    if (ret) {
        ALOGV("Waiting for touch up failed: %d", ret);
    }
    return ret;
}
Example #2
bool
RawDumpCmdQueThread::
getCommand(sp<RawDumpCmdCookie> &rCmdCookie)
{
    FUNCTION_IN;
    //
    bool ret = false;
    //
    Mutex::Autolock _l(mCmdMtx);
    //
    MY_LOGD("+ tid(%d), que size(%d)", ::gettid(), mCmdQ.size());
    //
    while ( mCmdQ.empty() && ! exitPending() )
    {
        mCmdCond.wait(mCmdMtx);    
    }
    // dequeue the next pending command, if any
    if ( !mCmdQ.empty() )
    {
        rCmdCookie = *mCmdQ.begin();
        mCmdQ.erase(mCmdQ.begin());
        ret = true;
        MY_LOGD(" frame[%d] in slot[%d] is dequeued.", rCmdCookie->getFrameCnt(),rCmdCookie->getFrameCnt() );
    }
    //
    MY_LOGD("- tid(%d), que size(%d), ret(%d)", ::gettid(), mCmdQ.size(), ret);
    //
    FUNCTION_OUT;
    //
    return ret;
}
Example #3
bool BootAnimation::android() {
    initTexture(&mAndroid[0], mAssets, "images/android-logo-mask.png");
    initTexture(&mAndroid[1], mAssets, "images/android-logo-shine.png");

    // clear screen
    glDisable(GL_DITHER);
    glDisable(GL_SCISSOR_TEST);
    glClear(GL_COLOR_BUFFER_BIT);
    eglSwapBuffers(mDisplay, mSurface);

    const GLint xc = (mWidth  - mAndroid[0].w) / 2;
    const GLint yc = (mHeight - mAndroid[0].h) / 2;
    const Rect updateRect(xc, yc, xc + mAndroid[0].w, yc + mAndroid[0].h);

    // draw and update only what we need
    mNativeWindowSurface->setSwapRectangle(updateRect.left,
            updateRect.top, updateRect.width(), updateRect.height());

    glScissor(updateRect.left, mHeight - updateRect.bottom, updateRect.width(),
            updateRect.height());

    // Blend state
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);

    const nsecs_t startTime = systemTime();
    do {
        nsecs_t now = systemTime();
        double time = now - startTime;
        float t = 4.0f * float(time / us2ns(16667)) / mAndroid[1].w;
        GLint offset = (1 - (t - floorf(t))) * mAndroid[1].w;
        GLint x = xc - offset;

        glDisable(GL_SCISSOR_TEST);
        glClear(GL_COLOR_BUFFER_BIT);

        glEnable(GL_SCISSOR_TEST);
        glDisable(GL_BLEND);
        glBindTexture(GL_TEXTURE_2D, mAndroid[1].name);
        glDrawTexiOES(x,                 yc, 0, mAndroid[1].w, mAndroid[1].h);
        glDrawTexiOES(x + mAndroid[1].w, yc, 0, mAndroid[1].w, mAndroid[1].h);

        glEnable(GL_BLEND);
        glBindTexture(GL_TEXTURE_2D, mAndroid[0].name);
        glDrawTexiOES(xc, yc, 0, mAndroid[0].w, mAndroid[0].h);

        EGLBoolean res = eglSwapBuffers(mDisplay, mSurface);
        if (res == EGL_FALSE)
            break;

        // 12fps: don't animate too fast to preserve CPU
        const nsecs_t sleepTime = 83333 - ns2us(systemTime() - now);
        if (sleepTime > 0)
            usleep(sleepTime); 
    } while (!exitPending());

    glDeleteTextures(1, &mAndroid[0].name);
    glDeleteTextures(1, &mAndroid[1].name);
    return false;
}
bool AudioALSALoopbackController::AudioMTKLoopbackThread::threadLoop()
{
    uint8_t *pReadBuffer;
    uint32_t uReadByte, uWriteDataToBT;
    CVSDLoopbackResetBuffer();
    while (!exitPending())
    {
        ALOGD("BT_SW_CVSD AP loopback threadLoop(+)");
        uWriteDataToBT = 0;
        CVSDLoopbackGetReadBuffer(&pReadBuffer, &uReadByte);
        uReadByte &= 0xFFFFFFFE;
        if (uReadByte)
        {
            uWriteDataToBT = streamOutput->write(pReadBuffer, uReadByte);
            CVSDLoopbackReadDataDone(uWriteDataToBT);
        }
        else
        {
            usleep(5 * 1000); //5ms
        }
        ALOGD("BT_SW_CVSD AP loopback threadLoop(-), uReadByte: %d, uWriteDataToBT: %d", uReadByte, uWriteDataToBT);
    }
    ALOGD("BT_SW_CVSD AP loopback threadLoop exit");
    return false;
}
Example #5
	//add vtmal test case here!!!!
	virtual bool  threadLoop()
	{
		VTMAL_LOGINFO;
		while (!exitPending())
		{
			m_iSignalCount ++;
			if(m_iSignalCount == 1)
			{
				VTMAL_LOGINFO;
			}
			{ // Scope m_LockCondition so it is released before PushData()
				// acquires m_Lock below, avoiding a lock-order deadlock.
				Mutex::Autolock _l(m_LockCondition);
				if(!m_sentSignal)
					mWaitWork.wait(m_LockCondition);
				m_sentSignal = false;
				if(m_iSignalCount == 1)
				{
					VTMAL_LOGINFO;
				}
				if(exitPending())
				{
					VTMAL_LOGINFO;
					return false;
				}
			}			
			if(_p)
			{
				_p->PushData();
			}
			else
			{
				VTMAL_LOGERR;
				LOGE("[VTMAL]CMp4Encoder @ SendThread _p is NULL!!! = 0x%x",_p);
				//CHECK(_p);
			}

		}
		VTMAL_LOGINFO;
		return false;
	}
bool TaskManager::WorkerThread::addTask(TaskWrapper task) {
    if (!isRunning()) {
        run(mName.string(), PRIORITY_DEFAULT);
    } else if (exitPending()) {
        return false;
    }

    Mutex::Autolock l(mLock);
    ssize_t index = mTasks.add(task);
    mSignal.signal();

    return index >= 0;
}
bool Fpc1020Sensor::AuthenticationThread::threadLoop()
{
    ALOGV("Started authentication thread");

    do {
        int ret = doSingleAuthentication();
        if (ret) {
            ALOGW("Authentication attempt failed: %d", ret);
            mSensor->mErrorCb(ret, mSensor->mCbData);
        }
    } while (!exitPending() && mSensor->mWaitingForWakeup);

    handleShutdown();
    return false;
}
bool TaskManager::WorkerThread::addTask(const TaskWrapper& task) {
    if (!isRunning()) {
        run(mName.string(), PRIORITY_DEFAULT);
    } else if (exitPending()) {
        return false;
    }

    {
        Mutex::Autolock l(mLock);
        mTasks.push_back(task);
    }
    mSignal.signal();

    return true;
}
Example #9
	virtual bool        threadLoop()
	{
		sp<work_item_t> data;

		while (!exitPending()){
			_TRACE("check work");
			{
				Mutex::Autolock _locker(workMutex);
				if (work.size() == 0) {
					workCond.wait(workMutex);
				}
				if (work.size() == 0) {
					_TRACE("no work ");
					continue;
				}
				// dequeue while still holding workMutex so the queue
				// cannot be modified concurrently
				data = work[0];
				work.removeAt(0);
			}

			_TRACE("got work(%p,cmd:%d, sp1:%s, sp2:%s) ",data.get(),data->cmd,DISPLAY_STRING16(data->sparam1),DISPLAY_STRING16(data->sparam2));
			switch(data->cmd){
			case	WORK_Defer_OnNewStatement:
				{
					sp<ChatSession> cs = data->cs;
					cs->Fire_OnNewStatement(data->sparam1,data->sparam2);
				}break;
			case	WORK_Defer_OnNewUser:
				{
					sp<ChatSession> cs = data->cs;
					cs->Fire_OnNewUser(data->sparam1);
				}break;
			case	WORK_Defer_OnUserLeft:
				{
					sp<ChatSession> cs = data->cs;
					cs->Fire_OnUserLeft(data->sparam1);
				}break;
			default:
				break;
			}

			data.clear();
			_TRACE("work down");
		}
		return false;

    }
bool JpegCompressor::threadLoop() {
    Mutex::Autolock lock(mMutex);
    ALOGV("%s: Starting compression thread", __FUNCTION__);

    // Find source and target buffers. Assumes only one buffer matches
    // each condition!

    mFoundJpeg = false;
    mFoundAux = false;
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
            mJpegBuffer = b;
            mFoundJpeg = true;
        } else if (b.streamId <= 0) {
            mAuxBuffer = b;
            mFoundAux = true;
        }
        if (mFoundJpeg && mFoundAux) break;
    }
    if (!mFoundJpeg || !mFoundAux) {
        ALOGE("%s: Unable to find buffers for JPEG source/destination",
                __FUNCTION__);
        cleanUp();
        return false;
    }

    // Set up error management

    mJpegErrorInfo = NULL;
    JpegError error;
    error.parent = this;

    mCInfo.err = jpeg_std_error(&error);
    mCInfo.err->error_exit = jpegErrorHandler;

    jpeg_create_compress(&mCInfo);
    if (checkError("Error initializing compression")) return false;

    // Route compressed data straight to output stream buffer

    JpegDestination jpegDestMgr;
    jpegDestMgr.parent = this;
    jpegDestMgr.init_destination = jpegInitDestination;
    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
    jpegDestMgr.term_destination = jpegTermDestination;

    mCInfo.dest = &jpegDestMgr;

    // Set up compression parameters

    mCInfo.image_width = mAuxBuffer.width;
    mCInfo.image_height = mAuxBuffer.height;
    mCInfo.input_components = 3;
    mCInfo.in_color_space = JCS_RGB;

    jpeg_set_defaults(&mCInfo);
    if (checkError("Error configuring defaults")) return false;

    // Do compression

    jpeg_start_compress(&mCInfo, TRUE);
    if (checkError("Error starting compression")) return false;

    size_t rowStride = mAuxBuffer.stride * 3;
    const size_t kChunkSize = 32;
    while (mCInfo.next_scanline < mCInfo.image_height) {
        JSAMPROW chunk[kChunkSize];
        for (size_t i = 0 ; i < kChunkSize; i++) {
            chunk[i] = (JSAMPROW)
                    (mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
        }
        jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
        if (checkError("Error while compressing")) return false;
        if (exitPending()) {
            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
            cleanUp();
            return false;
        }
    }

    jpeg_finish_compress(&mCInfo);
    if (checkError("Error while finishing compression")) return false;

    // Write to JPEG output stream

    ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
          mJpegBuffer.streamId);

    GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
    status_t res;
    const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
    res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
    if (res != OK) {
        ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
                __FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
        mParent->signalError();
    }

    // All done

    cleanUp();

    return false;
}
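The compressor above routes libjpeg's output straight into the blob stream buffer by installing a custom jpeg_destination_mgr (the jpegInitDestination / jpegEmptyOutputBuffer / jpegTermDestination callbacks, whose bodies are not part of this snippet). A minimal, self-contained sketch of that technique writing into a caller-supplied fixed buffer; the MemoryDestination type and attachMemoryDestination() helper are illustrative names, not the HAL's actual implementation:

#include <cstddef>
#include <jpeglib.h>
#include <jerror.h>

// Destination manager that makes libjpeg compress directly into a fixed buffer.
struct MemoryDestination : jpeg_destination_mgr {
    JOCTET* buf;
    size_t  capacity;
};

static void memInitDestination(j_compress_ptr cinfo) {
    MemoryDestination* dest = static_cast<MemoryDestination*>(cinfo->dest);
    dest->next_output_byte = dest->buf;       // where libjpeg writes next
    dest->free_in_buffer   = dest->capacity;  // room left in the buffer
}

static boolean memEmptyOutputBuffer(j_compress_ptr cinfo) {
    // Reached only if the fixed buffer fills up; fail through libjpeg's error
    // handler rather than overwrite data that was already written.
    ERREXIT(cinfo, JERR_BUFFER_SIZE);
    return FALSE;  // not reached unless error_exit returns
}

static void memTermDestination(j_compress_ptr /*cinfo*/) {
    // Compressed size is capacity - free_in_buffer; nothing to flush here.
}

static void attachMemoryDestination(j_compress_ptr cinfo, MemoryDestination* dest,
                                    JOCTET* buf, size_t capacity) {
    dest->buf = buf;
    dest->capacity = capacity;
    dest->init_destination    = memInitDestination;
    dest->empty_output_buffer = memEmptyOutputBuffer;
    dest->term_destination    = memTermDestination;
    cinfo->dest = dest;  // plays the role of "mCInfo.dest = &jpegDestMgr" above
}

In the snippet above the same three callbacks are wired up through JpegDestination so compressed data lands in the JPEG stream buffer instead of a plain array.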
Example #11
bool BootAnimation::movie()
{
    ZipFileRO& zip(mZip);

    size_t numEntries = zip.getNumEntries();
    ZipEntryRO desc = zip.findEntryByName("desc.txt");
    FileMap* descMap = zip.createEntryFileMap(desc);
    LOGE_IF(!descMap, "descMap is null");
    if (!descMap) {
        return false;
    }

    String8 desString((char const*)descMap->getDataPtr(),
            descMap->getDataLength());
    char const* s = desString.string();

    Animation animation;

    // Parse the description file
    for (;;) {
        const char* endl = strstr(s, "\n");
        if (!endl) break;
        String8 line(s, endl - s);
        const char* l = line.string();
        int fps, width, height, count, pause;
        char path[256];
        if (sscanf(l, "%d %d %d", &width, &height, &fps) == 3) {
            //LOGD("> w=%d, h=%d, fps=%d", fps, width, height);
            animation.width = width;
            animation.height = height;
            animation.fps = fps;
        }
        if (sscanf(l, "p %d %d %s", &count, &pause, path) == 3) {
            //LOGD("> count=%d, pause=%d, path=%s", count, pause, path);
            Animation::Part part;
            part.count = count;
            part.pause = pause;
            part.path = path;
            animation.parts.add(part);
        }
        s = ++endl;
    }

    // read all the data structures
    const size_t pcount = animation.parts.size();
    for (size_t i=0 ; i<numEntries ; i++) {
        char name[256];
        ZipEntryRO entry = zip.findEntryByIndex(i);
        if (zip.getEntryFileName(entry, name, 256) == 0) {
            const String8 entryName(name);
            const String8 path(entryName.getPathDir());
            const String8 leaf(entryName.getPathLeaf());
            if (leaf.size() > 0) {
                for (int j=0 ; j<pcount ; j++) {
                    if (path == animation.parts[j].path) {
                        int method;
                        // supports only stored png files
                        if (zip.getEntryInfo(entry, &method, 0, 0, 0, 0, 0)) {
                            if (method == ZipFileRO::kCompressStored) {
                                FileMap* map = zip.createEntryFileMap(entry);
                                if (map) {
                                    Animation::Frame frame;
                                    frame.name = leaf;
                                    frame.map = map;
                                    Animation::Part& part(animation.parts.editItemAt(j));
                                    part.frames.add(frame);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // clear screen
    glShadeModel(GL_FLAT);
    glDisable(GL_DITHER);
    glDisable(GL_SCISSOR_TEST);
    glDisable(GL_BLEND);
    glClear(GL_COLOR_BUFFER_BIT);

    eglSwapBuffers(mDisplay, mSurface);

    glBindTexture(GL_TEXTURE_2D, 0);
    glEnable(GL_TEXTURE_2D);
    glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    const int xc = (mWidth - animation.width) / 2;
    const int yc = ((mHeight - animation.height) / 2);
    nsecs_t lastFrame = systemTime();
    nsecs_t frameDuration = s2ns(1) / animation.fps;

    Region clearReg(Rect(mWidth, mHeight));
    clearReg.subtractSelf(Region(Rect(xc, yc, xc+animation.width, yc+animation.height)));

    for (int i=0 ; i<pcount && !exitPending() ; i++) {
        const Animation::Part& part(animation.parts[i]);
        const size_t fcount = part.frames.size();
        glBindTexture(GL_TEXTURE_2D, 0);

        for (int r=0 ; !part.count || r<part.count ; r++) {
            for (int j=0 ; j<fcount && !exitPending(); j++) {
                const Animation::Frame& frame(part.frames[j]);

                if (r > 0) {
                    glBindTexture(GL_TEXTURE_2D, frame.tid);
                } else {
                    if (part.count != 1) {
                        glGenTextures(1, &frame.tid);
                        glBindTexture(GL_TEXTURE_2D, frame.tid);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    }
                    initTexture(
                            frame.map->getDataPtr(),
                            frame.map->getDataLength());
                }

                if (!clearReg.isEmpty()) {
                    Rect r;
                    Region::iterator head(clearReg);
                    glEnable(GL_SCISSOR_TEST);
                    while (head.iterate(&r)) {
                        glScissor(r.left, mHeight - r.bottom,
                                r.width(), r.height());
                        glClear(GL_COLOR_BUFFER_BIT);
                    }
                    glDisable(GL_SCISSOR_TEST);
                }
                glDrawTexiOES(xc, yc, 0, animation.width, animation.height);
                eglSwapBuffers(mDisplay, mSurface);

                nsecs_t now = systemTime();
                nsecs_t delay = frameDuration - (now - lastFrame);
                lastFrame = now;
                long wait = ns2us(delay);
                if (wait > 0)
                    usleep(wait);
            }
            usleep(part.pause * ns2us(frameDuration));
        }

        // free the textures for this part
        if (part.count != 1) {
            for (int j=0 ; j<fcount ; j++) {
                const Animation::Frame& frame(part.frames[j]);
                glDeleteTextures(1, &frame.tid);
            }
        }
    }

    return false;
}
bool Fpc1020Sensor::EnrollmentThread::threadLoop()
{
    int ret, enrolledId = -1, stepsRemaining = Fpc1020Sensor::EnrollmentStepCount;
    fingerprint_enroll_rsp_t *resp =
            (fingerprint_enroll_rsp_t *) mSensor->mQseecom.getReceiveBuffer();
    fingerprint_end_enroll_cmd_t *endReq =
            (fingerprint_end_enroll_cmd_t *) mSensor->mQseecom.getSendBuffer();
    fingerprint_end_enroll_rsp_t *endResp =
            (fingerprint_end_enroll_rsp_t *) mSensor->mQseecom.getReceiveBuffer();

    ALOGV("Started enrollment thread");
    while (!exitPending()) {
        ret = waitForTouchDown();
        if (ret) {
            goto out;
        }
        mSensor->mAcquiredCb(mSensor->mCbData);
        ret = mSensor->sendCommand(CLIENT_CMD_FP_GET_IMAGE_WITH_CAC);
        if (ret) {
            goto out;
        }
        ret = mSensor->sendCommand(CLIENT_CMD_FP_DO_ENROLL);
        if (ret) {
            goto out;
        }

        ALOGD("Enrollment step done: result %d progress %d", resp->result, resp->progress);

        if (resp->result == 3) {
            // The return value of 3 means 'enrollment done'
            break;
        } else if (resp->result == 0) {
            stepsRemaining--;
            // While empiric evidence shows the number of enrollment steps is
            // always 20, the protocol doesn't provide any guarantee for it.
            // Safeguard against that case.
            if (stepsRemaining <= 0) {
                stepsRemaining = 1;
            }
            mSensor->mEnrollmentCb(0xffffffff, stepsRemaining, mSensor->mCbData);
        } else {
            ALOGI("Enrollment step returned error (%d), ignoring.", resp->result);
        }
    }

    memset(endReq->unknown, 0, sizeof(endReq->unknown));
    endReq->identifier_len = 0; // identifier unused for now

    ret = mSensor->sendCommand(CLIENT_CMD_FP_END_ENROLL);
    if (ret) {
        goto out;
    }

    ALOGD("Enrollment thread finished: result %d id 0x%08x", endResp->result, endResp->id);
    enrolledId = endResp->id;
    if (endResp->result != 0) {
        ret = -EIO;
    }

out:
    ret = adjustReturnValueForCancel(ret);
    handleShutdown();

    if (ret == 0) {
        // Do the final enrollment callback after doing the cleanup, so
        // we're in idle state when doing this call. Upper layers query
        // the enrollment list more or less directly after this callback,
        // so if we aren't in idle state at this point, we're prone to
        // race conditions.
        mSensor->mEnrollmentCb(enrolledId, 0, mSensor->mCbData);
    } else {
        ALOGD("Enrollment failed: %d", ret);
        mSensor->mErrorCb(ret, mSensor->mCbData);
    }

    return false;
}
 virtual bool        isExitPending() const   { return exitPending(); }
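All of these examples follow the same libutils idiom: a subclass of android::Thread overrides threadLoop(), the owner stops it with requestExit() (usually paired with a condition signal so a blocked loop wakes up), and the loop polls exitPending() to decide when to stop; returning true from threadLoop() means "call me again", returning false ends the thread. A minimal, self-contained sketch of that idiom, assuming only the <utils/Thread.h> API; the WorkerThread class, its post()/stop() methods, and the mWork queue are illustrative and not taken from any example above:

#define LOG_TAG "ExitPendingSketch"
#include <utils/Log.h>
#include <utils/Thread.h>
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include <utils/Vector.h>

using namespace android;

// Hypothetical worker, shown only to illustrate the exitPending() idiom.
class WorkerThread : public Thread {
public:
    void post(int item) {
        Mutex::Autolock _l(mLock);
        mWork.push_back(item);
        mCond.signal();
    }

    void stop() {
        requestExit();              // makes exitPending() return true
        {
            Mutex::Autolock _l(mLock);
            mCond.signal();         // wake the loop so it can see the flag
        }
        join();
    }

private:
    virtual bool threadLoop() {
        int item;
        {
            Mutex::Autolock _l(mLock);
            while (mWork.isEmpty() && !exitPending()) {
                mCond.wait(mLock);
            }
            if (exitPending()) {
                return false;       // stop: thread exits
            }
            item = mWork[0];
            mWork.removeAt(0);
        }
        ALOGV("processing item %d", item);
        return true;                // run threadLoop() again
    }

    Mutex mLock;
    Condition mCond;
    Vector<int> mWork;
};

A caller would hold the worker in an sp<WorkerThread>, start it with run("worker"), and shut it down with stop(); requestExitAndWait() alone is not enough once threadLoop() blocks on a condition variable, which is why most of the examples above pair the exit request with a signal.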
Example #14
bool AudioPolicyService::AudioCommandThread::threadLoop()
{
    nsecs_t waitTime = INT64_MAX;

    mLock.lock();
    while (!exitPending())
    {
        while (!mAudioCommands.isEmpty()) {
            nsecs_t curTime = systemTime();
            // commands are sorted by increasing time stamp: execute them from index 0 and up
            if (mAudioCommands[0]->mTime <= curTime) {
                AudioCommand *command = mAudioCommands[0];
                mAudioCommands.removeAt(0);
                mLastCommand = *command;

                switch (command->mCommand) {
                case START_TONE: {
                    mLock.unlock();
                    ToneData *data = (ToneData *)command->mParam;
                    ALOGV("AudioCommandThread() processing start tone %d on stream %d",
                          data->mType, data->mStream);
                    delete mpToneGenerator;
                    mpToneGenerator = new ToneGenerator(data->mStream, 1.0);
                    mpToneGenerator->startTone(data->mType);
                    delete data;
                    mLock.lock();
                }
                break;
                case STOP_TONE: {
                    mLock.unlock();
                    ALOGV("AudioCommandThread() processing stop tone");
                    if (mpToneGenerator != NULL) {
                        mpToneGenerator->stopTone();
                        delete mpToneGenerator;
                        mpToneGenerator = NULL;
                    }
                    mLock.lock();
                }
                break;
                case SET_VOLUME: {
                    VolumeData *data = (VolumeData *)command->mParam;
                    ALOGV("AudioCommandThread() processing set volume stream %d, \
                        volume %f, output %d", data->mStream, data->mVolume, data->mIO);
                    command->mStatus = AudioSystem::setStreamVolume(data->mStream,
                                       data->mVolume,
                                       data->mIO);
                    if (command->mWaitStatus) {
                        command->mCond.signal();
                        mWaitWorkCV.wait(mLock);
                    }
                    delete data;
                }
                break;
                case SET_PARAMETERS: {
                    ParametersData *data = (ParametersData *)command->mParam;
                    ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
                          data->mKeyValuePairs.string(), data->mIO);
                    command->mStatus = AudioSystem::setParameters(data->mIO, data->mKeyValuePairs);
                    if (command->mWaitStatus) {
                        command->mCond.signal();
                        mWaitWorkCV.wait(mLock);
                    }
                    delete data;
                }
                break;
                case SET_VOICE_VOLUME: {
                    VoiceVolumeData *data = (VoiceVolumeData *)command->mParam;
                    ALOGV("AudioCommandThread() processing set voice volume volume %f",
                          data->mVolume);
                    command->mStatus = AudioSystem::setVoiceVolume(data->mVolume);
                    if (command->mWaitStatus) {
                        command->mCond.signal();
                        mWaitWorkCV.wait(mLock);
                    }
                    delete data;
                }
                break;
                default:
                    ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
                }
                delete command;
                waitTime = INT64_MAX;
            } else {
                waitTime = mAudioCommands[0]->mTime - curTime;
                break;
            }
        }
        // Queue is empty or the head command is not due yet: sleep until
        // the next scheduled command or until a new command is signalled.
        ALOGV("AudioCommandThread() going to sleep");
        mWaitWorkCV.waitRelative(mLock, waitTime);
        ALOGV("AudioCommandThread() waking up");
    }
    mLock.unlock();
    return false;
}
bool AudioPlayer::threadLoop()
{
    struct pcm_config config;
    struct pcm *pcm = NULL;
    bool moreChunks = true;
    const struct chunk_fmt* chunkFmt = NULL;
    int bufferSize;
    const uint8_t* wavData;
    size_t wavLength;
    const struct riff_wave_header* wavHeader;

    if (mCurrentFile == NULL) {
        ALOGE("mCurrentFile is NULL");
        return false;
     }

    wavData = (const uint8_t *)mCurrentFile->getDataPtr();
    if (!wavData) {
        ALOGE("Could not access WAV file data");
        goto exit;
    }
    wavLength = mCurrentFile->getDataLength();

    wavHeader = (const struct riff_wave_header *)wavData;
    if (wavLength < sizeof(*wavHeader) || (wavHeader->riff_id != ID_RIFF) ||
        (wavHeader->wave_id != ID_WAVE)) {
        ALOGE("Error: audio file is not a riff/wave file\n");
        goto exit;
    }
    wavData += sizeof(*wavHeader);
    wavLength -= sizeof(*wavHeader);

    do {
        const struct chunk_header* chunkHeader = (const struct chunk_header*)wavData;
        if (wavLength < sizeof(*chunkHeader)) {
            ALOGE("EOF reading chunk headers");
            goto exit;
        }

        wavData += sizeof(*chunkHeader);
        wavLength -=  sizeof(*chunkHeader);

        switch (chunkHeader->id) {
            case ID_FMT:
                chunkFmt = (const struct chunk_fmt *)wavData;
                wavData += chunkHeader->sz;
                wavLength -= chunkHeader->sz;
                break;
            case ID_DATA:
                /* Stop looking for chunks */
                moreChunks = false;
                break;
            default:
                /* Unknown chunk, skip bytes */
                wavData += chunkHeader->sz;
                wavLength -= chunkHeader->sz;
        }
    } while (moreChunks);

    if (!chunkFmt) {
        ALOGE("format not found in WAV file");
        goto exit;
    }


    memset(&config, 0, sizeof(config));
    config.channels = chunkFmt->num_channels;
    config.rate = chunkFmt->sample_rate;
    config.period_size = mPeriodSize;
    config.period_count = mPeriodCount;
    config.start_threshold = mPeriodSize / 4;
    config.stop_threshold = INT_MAX;
    config.avail_min = config.start_threshold;
    if (chunkFmt->bits_per_sample != 16) {
        ALOGE("only 16 bit WAV files are supported");
        goto exit;
    }
    config.format = PCM_FORMAT_S16_LE;

    pcm = pcm_open(mCard, mDevice, PCM_OUT, &config);
    if (!pcm || !pcm_is_ready(pcm)) {
        ALOGE("Unable to open PCM device (%s)\n", pcm_get_error(pcm));
        goto exit;
    }

    bufferSize = pcm_frames_to_bytes(pcm, pcm_get_buffer_size(pcm));

    while (wavLength > 0) {
        if (exitPending()) goto exit;
        size_t count = bufferSize;
        if (count > wavLength)
            count = wavLength;

        if (pcm_write(pcm, wavData, count)) {
            ALOGE("pcm_write failed (%s)", pcm_get_error(pcm));
            goto exit;
        }
        wavData += count;
        wavLength -= count;
    }

exit:
    if (pcm)
        pcm_close(pcm);
    mCurrentFile->release();
    mCurrentFile = NULL;
    return false;
}
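The parser above walks the standard RIFF/WAVE layout through a few header structs (riff_wave_header, chunk_header, chunk_fmt) and FourCC constants that the snippet does not define. A sketch of those definitions, matching the fields the code reads and following the usual tinyalsa tinyplay layout (any fields beyond the ones referenced above are assumptions):

#include <stdint.h>

// FourCC tags as little-endian 32-bit values ('RIFF', 'WAVE', 'fmt ', 'data').
#define ID_RIFF 0x46464952
#define ID_WAVE 0x45564157
#define ID_FMT  0x20746d66
#define ID_DATA 0x61746164

struct riff_wave_header {
    uint32_t riff_id;   // expected to be ID_RIFF
    uint32_t riff_sz;
    uint32_t wave_id;   // expected to be ID_WAVE
};

struct chunk_header {
    uint32_t id;        // ID_FMT, ID_DATA, or an unknown chunk to skip
    uint32_t sz;        // payload size in bytes
};

struct chunk_fmt {
    uint16_t audio_format;
    uint16_t num_channels;
    uint32_t sample_rate;
    uint32_t byte_rate;
    uint16_t block_align;
    uint16_t bits_per_sample;  // the player only accepts 16 here
};

With these definitions the chunk walk above is just pointer arithmetic over the mapped file: read a chunk_header, act on ID_FMT or ID_DATA, and skip anything else by sz bytes.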
bool StatusTracker::threadLoop() {
    status_t res;

    // Wait for state updates
    {
        Mutex::Autolock pl(mPendingLock);
        while (mPendingChangeQueue.size() == 0 && !mComponentsChanged) {
            res = mPendingChangeSignal.waitRelative(mPendingLock,
                    kWaitDuration);
            if (exitPending()) return false;
            if (res != OK) {
                if (res != TIMED_OUT) {
                    ALOGE("%s: Error waiting on state changes: %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                }
                // TIMED_OUT is expected
                break;
            }
        }
    }

    // After new pending states appear, or timeout, check if we're idle.  Even
    // with timeout, need to check to account for fences that may still be
    // clearing out
    sp<Camera3Device> parent;
    {
        Mutex::Autolock pl(mPendingLock);
        Mutex::Autolock l(mLock);

        // Collect all pending state updates and see if the device
        // collectively transitions between idle and active for each one

        // First pass for changed components or fence completions
        ComponentState prevState = getDeviceStateLocked();
        if (prevState != mDeviceState) {
            // Only collect changes to overall device state
            mStateTransitions.add(prevState);
        }
        // For each pending component state update, check if we've transitioned
        // to a new overall device state
        for (size_t i = 0; i < mPendingChangeQueue.size(); i++) {
            const StateChange &newState = mPendingChangeQueue[i];
            ssize_t idx = mStates.indexOfKey(newState.id);
            // Ignore notices for unknown components
            if (idx >= 0) {
                // Update single component state
                mStates.replaceValueAt(idx, newState.state);
                mIdleFence = Fence::merge(String8("idleFence"),
                        mIdleFence, newState.fence);
                // .. and see if overall device state has changed
                ComponentState newDeviceState = getDeviceStateLocked();
                if (newDeviceState != prevState) {
                    mStateTransitions.add(newDeviceState);
                }
                prevState = newDeviceState;
            }
        }
        mPendingChangeQueue.clear();
        mComponentsChanged = false;

        // Store final state after all pending state changes are done with

        mDeviceState = prevState;
        parent = mParent.promote();
    }

    // Notify parent for all intermediate transitions
    if (mStateTransitions.size() > 0 && parent.get()) {
        for (size_t i = 0; i < mStateTransitions.size(); i++) {
            bool idle = (mStateTransitions[i] == IDLE);
            ALOGV("Camera device is now %s", idle ? "idle" : "active");
            parent->notifyStatus(idle);
        }
    }
    mStateTransitions.clear();

    return true;
}
bool LivePhotoSource::threadLoop() {
	ALOGD("+");
	status_t err = OK;
    MediaBuffer *buffer = NULL;
	int32_t isSync = false;

	while(mSourceStarted && !exitPending() && ((err = mSource->read(&buffer)) == OK)) {
        MediaBuffer* copy = new MediaBuffer(buffer->range_length(), buffer->meta_data());
        memcpy( copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(), buffer->range_length() );
        copy->set_range(0, buffer->range_length());

		int64_t latestTimestampUs = 0;
		//CHECK(copy->meta_data()->findInt64(kKeyTime, &latestTimestampUs));
		copy->meta_data()->findInt64(kKeyTime, &latestTimestampUs);
		ALOGI("cur timestamp is %lldus", latestTimestampUs);
		{
			Mutex::Autolock _lock(mLock);
			
			int32_t isCodecConfig;
			if(copy->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig ) {
				if(mCodecConfigBuffer != NULL) {
					mCodecConfigBuffer->release();
					mCodecConfigBuffer = NULL;
				}
				
				ALOGD("keep codec config buffer");
				mCodecConfigBuffer = copy;
			}
			else {
		        mMediaBufferPool.push_back(copy);

				if(mLivePhotoStarted) {
					mFrameAvailableCond.signal();
					copy->meta_data()->findInt32(kKeyIsSyncFrame, &isSync);
					
					if (!isSync) {
						if (reinterpret_cast<MediaCodecSource *>(mSource.get())->
						                  requestIDRFrame() != OK)
							ALOGW("Send force I cmd fail");
					}
					else {
						mSourceStarted = false;
						buffer->release();
						buffer = NULL;
						break; // 
					}
				}
				else {
					updateBufferPool();
				}
			}
		}

		buffer->release();
		buffer = NULL;
	}

	{
		Mutex::Autolock _lock(mLock);
		if (err != OK) {
			ALOGE("reading from source failed (%d); the live photo may be incomplete", err);
		}

		if (mSourceStarted && mLivePhotoStarted) {
			mLivePhotoStarted = false;
			mSourceStarted = false;
			ALOGE("read loop exited unexpectedly while live photo capture was still active");
			mFrameAvailableCond.signal();
		}
		ALOGD("Thread exit signal");
		mThreadExitCond.signal();
		mIsThreadExit = true;
	}
	
	ALOGD("-");
	return false;
}
bool BootAnimation::movie()
{
    ZipEntryRO desc = mZip->findEntryByName("desc.txt");
    ALOGE_IF(!desc, "couldn't find desc.txt");
    if (!desc) {
        return false;
    }

    FileMap* descMap = mZip->createEntryFileMap(desc);
    mZip->releaseEntry(desc);
    ALOGE_IF(!descMap, "descMap is null");
    if (!descMap) {
        return false;
    }

    String8 desString((char const*)descMap->getDataPtr(),
            descMap->getDataLength());
    descMap->release();
    char const* s = desString.string();

    Animation animation;

    // Parse the description file
    for (;;) {
        const char* endl = strstr(s, "\n");
        if (!endl) break;
        String8 line(s, endl - s);
        const char* l = line.string();
        int fps, width, height, count, pause;
        char path[ANIM_ENTRY_NAME_MAX];
        char pathType;
        if (sscanf(l, "%d %d %d", &width, &height, &fps) == 3) {
            //LOGD("> w=%d, h=%d, fps=%d", width, height, fps);
            animation.width = width;
            animation.height = height;
            animation.fps = fps;
        }
        else if (sscanf(l, " %c %d %d %s", &pathType, &count, &pause, path) == 4) {
            //LOGD("> type=%c, count=%d, pause=%d, path=%s", pathType, count, pause, path);
            Animation::Part part;
            part.playUntilComplete = pathType == 'c';
            part.count = count;
            part.pause = pause;
            part.path = path;
            animation.parts.add(part);
        }

        s = ++endl;
    }

    // read all the data structures
    const size_t pcount = animation.parts.size();
    void *cookie = NULL;
    if (!mZip->startIteration(&cookie)) {
        return false;
    }

    ZipEntryRO entry;
    char name[ANIM_ENTRY_NAME_MAX];
    while ((entry = mZip->nextEntry(cookie)) != NULL) {
        const int foundEntryName = mZip->getEntryFileName(entry, name, ANIM_ENTRY_NAME_MAX);
        if (foundEntryName > ANIM_ENTRY_NAME_MAX || foundEntryName == -1) {
            ALOGE("Error fetching entry file name");
            continue;
        }

        const String8 entryName(name);
        const String8 path(entryName.getPathDir());
        const String8 leaf(entryName.getPathLeaf());
        if (leaf.size() > 0) {
            for (size_t j=0 ; j<pcount ; j++) {
                if (path == animation.parts[j].path) {
                    int method;
                    // supports only stored png files
                    if (mZip->getEntryInfo(entry, &method, NULL, NULL, NULL, NULL, NULL)) {
                        if (method == ZipFileRO::kCompressStored) {
                            FileMap* map = mZip->createEntryFileMap(entry);
                            if (map) {
                                Animation::Frame frame;
                                frame.name = leaf;
                                frame.map = map;
                                Animation::Part& part(animation.parts.editItemAt(j));
                                part.frames.add(frame);
                            }
                        }
                    }
                }
            }
        }
    }

    mZip->endIteration(cookie);

    // clear screen
    glShadeModel(GL_FLAT);
    glDisable(GL_DITHER);
    glDisable(GL_SCISSOR_TEST);
    glDisable(GL_BLEND);
    glClearColor(0,0,0,1);
    glClear(GL_COLOR_BUFFER_BIT);

    eglSwapBuffers(mDisplay, mSurface);

    glBindTexture(GL_TEXTURE_2D, 0);
    glEnable(GL_TEXTURE_2D);
    glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    const int xc = (mWidth - animation.width) / 2;
    const int yc = ((mHeight - animation.height) / 2);
    nsecs_t lastFrame = systemTime();
    nsecs_t frameDuration = s2ns(1) / animation.fps;

    Region clearReg(Rect(mWidth, mHeight));
    clearReg.subtractSelf(Rect(xc, yc, xc+animation.width, yc+animation.height));

    for (size_t i=0 ; i<pcount ; i++) {
        const Animation::Part& part(animation.parts[i]);
        const size_t fcount = part.frames.size();
        glBindTexture(GL_TEXTURE_2D, 0);

        for (int r=0 ; !part.count || r<part.count ; r++) {
            // Exit any non playuntil complete parts immediately
            if(exitPending() && !part.playUntilComplete)
                break;

            for (size_t j=0 ; j<fcount && (!exitPending() || part.playUntilComplete) ; j++) {
                const Animation::Frame& frame(part.frames[j]);
                nsecs_t lastFrame = systemTime();

                if (r > 0) {
                    glBindTexture(GL_TEXTURE_2D, frame.tid);
                } else {
                    if (part.count != 1) {
                        glGenTextures(1, &frame.tid);
                        glBindTexture(GL_TEXTURE_2D, frame.tid);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    }
                    initTexture(frame);
                }

                if (!clearReg.isEmpty()) {
                    Region::const_iterator head(clearReg.begin());
                    Region::const_iterator tail(clearReg.end());
                    glEnable(GL_SCISSOR_TEST);
                    while (head != tail) {
                        const Rect& r(*head++);
                        glScissor(r.left, mHeight - r.bottom,
                                r.width(), r.height());
                        glClear(GL_COLOR_BUFFER_BIT);
                    }
                    glDisable(GL_SCISSOR_TEST);
                }
                glDrawTexiOES(xc, yc, 0, animation.width, animation.height);
                eglSwapBuffers(mDisplay, mSurface);

                nsecs_t now = systemTime();
                nsecs_t delay = frameDuration - (now - lastFrame);
                //ALOGD("%lld, %lld", ns2ms(now - lastFrame), ns2ms(delay));
                lastFrame = now;

                if (delay > 0) {
                    struct timespec spec;
                    spec.tv_sec  = (now + delay) / 1000000000;
                    spec.tv_nsec = (now + delay) % 1000000000;
                    int err;
                    do {
                        err = clock_nanosleep(CLOCK_MONOTONIC, TIMER_ABSTIME, &spec, NULL);
                    } while (err<0 && errno == EINTR);
                }

                checkExit();
            }

            usleep(part.pause * ns2us(frameDuration));

            // For infinite parts, we've now played them at least once, so perhaps exit
            if(exitPending() && !part.count)
                break;
        }

        // free the textures for this part
        if (part.count != 1) {
            for (size_t j=0 ; j<fcount ; j++) {
                const Animation::Frame& frame(part.frames[j]);
                glDeleteTextures(1, &frame.tid);
            }
        }
    }

    return false;
}
bool BootAnimation::movie()
{

    char bootenabled[PROPERTY_VALUE_MAX];
    char bootsound[PROPERTY_VALUE_MAX];
    char bootvolume[PROPERTY_VALUE_MAX];
    property_get("persist.sys.boot_enabled", bootenabled, "1");
    property_get("persist.sys.boot_sound", bootsound, "1");
    property_get("persist.sys.boot_volume", bootvolume, "0.2");

    bool bootEnabled = atoi(bootenabled) != 0;
    bool enableSound = atoi(bootsound) != 0;
    float bootVolume = strtof(bootvolume, NULL);

    if(!bootEnabled) {
        return false;
    }

    if(enableSound){
      sp<MediaPlayer> mediaplay = new MediaPlayer();
      mediaplay->setDataSource ("/system/media/audio.mp3", NULL);
      mediaplay->setVolume (bootVolume, bootVolume);
      mediaplay->prepare();
      mediaplay->start();
    }

    ZipFileRO& zip(mZip);

    size_t numEntries = zip.getNumEntries();
    ZipEntryRO desc = zip.findEntryByName("desc.txt");
    FileMap* descMap = zip.createEntryFileMap(desc);
    ALOGE_IF(!descMap, "descMap is null");
    if (!descMap) {
        return false;
    }

    String8 desString((char const*)descMap->getDataPtr(),
            descMap->getDataLength());
    char const* s = desString.string();

    Animation animation;

    float r = 0.0f;
    float g = 0.0f;
    float b = 0.0f;

    // Parse the description file
    for (;;) {
        const char* endl = strstr(s, "\n");
        if (!endl) break;
        String8 line(s, endl - s);
        const char* l = line.string();
        int fps, width, height, count, pause, red, green, blue;
        char path[256];
        char pathType;
        if (sscanf(l, "%d %d %d %d %d %d", &width, &height, &fps, &red, &green, &blue) == 6) {
            //ALOGD("> w=%d, h=%d, fps=%d, rgb=(%d, %d, %d)", width, height, fps, red, green, blue);
            animation.width = width;
            animation.height = height;
            animation.fps = fps;
            r = (float) red / 255.0f;
            g = (float) green / 255.0f;
            b = (float) blue / 255.0f;
        }
        else if (sscanf(l, "%d %d %d", &width, &height, &fps) == 3) {
            //LOGD("> w=%d, h=%d, fps=%d", width, height, fps);
            animation.width = width;
            animation.height = height;
            animation.fps = fps;
        }
        else if (sscanf(l, " %c %d %d %s", &pathType, &count, &pause, path) == 4) {
            //LOGD("> type=%c, count=%d, pause=%d, path=%s", pathType, count, pause, path);
            Animation::Part part;
            part.playUntilComplete = pathType == 'c';
            part.count = count;
            part.pause = pause;
            part.path = path;
            animation.parts.add(part);
        }

        s = ++endl;
    }

    // read all the data structures
    const size_t pcount = animation.parts.size();
    for (size_t i=0 ; i<numEntries ; i++) {
        char name[256];
        ZipEntryRO entry = zip.findEntryByIndex(i);
        if (zip.getEntryFileName(entry, name, 256) == 0) {
            const String8 entryName(name);
            const String8 path(entryName.getPathDir());
            const String8 leaf(entryName.getPathLeaf());
            if (leaf.size() > 0) {
                for (size_t j=0 ; j<pcount ; j++) {
                    if (path == animation.parts[j].path) {
                        int method;
                        // supports only stored png files
                        if (zip.getEntryInfo(entry, &method, 0, 0, 0, 0, 0)) {
                            if (method == ZipFileRO::kCompressStored) {
                                FileMap* map = zip.createEntryFileMap(entry);
                                if (map) {
                                    Animation::Frame frame;
                                    frame.name = leaf;
                                    frame.map = map;
                                    Animation::Part& part(animation.parts.editItemAt(j));
                                    part.frames.add(frame);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

#ifndef CONTINUOUS_SPLASH
    // clear screen
    glShadeModel(GL_FLAT);
    glDisable(GL_DITHER);
    glDisable(GL_SCISSOR_TEST);
    glDisable(GL_BLEND);
    glClearColor(r,g,b,1);
    glClear(GL_COLOR_BUFFER_BIT);

    eglSwapBuffers(mDisplay, mSurface);
#endif

    glBindTexture(GL_TEXTURE_2D, 0);
    glEnable(GL_TEXTURE_2D);
    glTexEnvx(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    const int xc = (mWidth - animation.width) / 2;
    const int yc = ((mHeight - animation.height) / 2);
    nsecs_t lastFrame = systemTime();
    nsecs_t frameDuration = s2ns(1) / animation.fps;

    Region clearReg(Rect(mWidth, mHeight));
    clearReg.subtractSelf(Rect(xc, yc, xc+animation.width, yc+animation.height));

    for (int i=0 ; i<pcount ; i++) {
        const Animation::Part& part(animation.parts[i]);
        const size_t fcount = part.frames.size();

        // can be 1, 0, or not set
        #ifdef NO_TEXTURE_CACHE
        const int noTextureCache = NO_TEXTURE_CACHE;
        #else
        const int noTextureCache = ((animation.width * animation.height * fcount) >
                                 48 * 1024 * 1024) ? 1 : 0;
        #endif

        glBindTexture(GL_TEXTURE_2D, 0);

        for (int r=0 ; !part.count || r<part.count ; r++) {
            // Exit any non playuntil complete parts immediately
            if(exitPending() && !part.playUntilComplete)
                break;

            for (int j=0 ; j<fcount && (!exitPending() || part.playUntilComplete) ; j++) {
                const Animation::Frame& frame(part.frames[j]);
                nsecs_t lastFrame = systemTime();

                if (r > 0 && !noTextureCache) {
                    glBindTexture(GL_TEXTURE_2D, frame.tid);
                } else {
                    if (part.count != 1) {
                        glGenTextures(1, &frame.tid);
                        glBindTexture(GL_TEXTURE_2D, frame.tid);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                        glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    }
                    initTexture(
                            frame.map->getDataPtr(),
                            frame.map->getDataLength());
                }

                if (!clearReg.isEmpty()) {
                    Region::const_iterator head(clearReg.begin());
                    Region::const_iterator tail(clearReg.end());
                    glEnable(GL_SCISSOR_TEST);
                    while (head != tail) {
                        const Rect& r(*head++);
                        glScissor(r.left, mHeight - r.bottom,
                                r.width(), r.height());
                        glClear(GL_COLOR_BUFFER_BIT);
                    }
                    glDisable(GL_SCISSOR_TEST);
                }
                glDrawTexiOES(xc, yc, 0, animation.width, animation.height);
                eglSwapBuffers(mDisplay, mSurface);

                nsecs_t now = systemTime();
                nsecs_t delay = frameDuration - (now - lastFrame);
                //ALOGD("%lld, %lld", ns2ms(now - lastFrame), ns2ms(delay));
                lastFrame = now;

                if (delay > 0) {
                    struct timespec spec;
                    spec.tv_sec  = (now + delay) / 1000000000;
                    spec.tv_nsec = (now + delay) % 1000000000;
                    int err;
                    do {
                        err = clock_nanosleep(CLOCK_MONOTONIC, TIMER_ABSTIME, &spec, NULL);
                    } while (err<0 && errno == EINTR);
                }

                checkExit();

                if (noTextureCache)
                    glDeleteTextures(1, &frame.tid);
            }

            usleep(part.pause * ns2us(frameDuration));

            // For infinite parts, we've now played them at least once, so perhaps exit
            if(exitPending() && !part.count)
                break;
        }

        // free the textures for this part
        if (part.count != 1 && !noTextureCache) {
            for (size_t j=0 ; j<fcount ; j++) {
                const Animation::Frame& frame(part.frames[j]);
                glDeleteTextures(1, &frame.tid);
            }
        }
    }

    return false;
}