Example #1
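// Sets up the preview/recording streaming state for a Camera2Client.
// No streams exist yet (NO_STREAM), no request is active, and the
// recording buffer heap starts with all entries free.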
StreamingProcessor::StreamingProcessor(sp<Camera2Client> client):
        mClient(client),
        mDevice(client->getCameraDevice()),
        mId(client->getCameraId()),
        mActiveRequest(NONE),
        mPaused(false),
        mPreviewRequestId(Camera2Client::kPreviewRequestIdStart),
        mPreviewStreamId(NO_STREAM),
        mRecordingRequestId(Camera2Client::kRecordingRequestIdStart),
        mRecordingStreamId(NO_STREAM),
        mRecordingFrameAvailable(false),
        mRecordingHeapCount(kDefaultRecordingHeapCount),
        mRecordingHeapFree(kDefaultRecordingHeapCount)
{
}
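
// Pulls face detection results out of a completed frame's metadata and
// forwards them to the client's face detection callback. Returns OK when
// face detection is disabled or the HAL omitted the data; BAD_VALUE when
// the metadata is present but malformed.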
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
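        // Sample the face detect setting under the shared parameters
        // lock; the scoped block releases the lock before parsing begins.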
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;
    // Keep the faces pointer well-defined even when no faces are reported.
    metadata.faces = NULL;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
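        // Each face rectangle is four int32s (left, top, right, bottom)
        // in active pixel array coordinates.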
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

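        // Landmarks and face IDs are only reported in FULL face detect
        // mode; each landmark entry holds six int32s, giving (x, y) for
        // the left eye, right eye, and mouth.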
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %d)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

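        // Face coordinates arrive in active array pixel space; the
        // arrayX/YToNormalizedWithCrop helpers map them into the
        // [-1000, 1000] range the camera1 API expects, relative to the
        // current scaler crop (zoom) region.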
        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
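            // Scores are defined on [1, 100]; a zero score marks an
            // invalid entry, so drop it from the reported face count.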
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 3], scalerCrop);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
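                // Landmarks are unavailable in SIMPLE mode; report the
                // out-of-range sentinel (-2000) so callers can tell these
                // fields carry no data.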
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
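        // metadata.faces points into the local Vector, so it is only
        // valid for the duration of the synchronous callback below.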
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}
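
// Compares the 3A (AE/AF/AWB) states in the latest capture result with
// the previously seen states and sends the client a notification for
// each transition. Results for already-handled frame numbers are skipped.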
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: Unable to read frame count from result",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);
        return OK;
    }

    mLast3AFrameNumber = frameNumber;

    // Get 3A states from result metadata
    bool gotAllStates = true;

    AlgState new3aState;

    // TODO: Also use AE mode, AE trigger ID

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &new3aState.afMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &new3aState.awbMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &new3aState.aeState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &new3aState.afState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &new3aState.awbState, frameNumber, cameraId);

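    // HAL 3.2+ devices deliver the trigger IDs through the result
    // extras; older devices still report them via the since-deprecated
    // ANDROID_CONTROL_AF_TRIGGER_ID / ANDROID_CONTROL_AE_PRECAPTURE_ID
    // metadata tags.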
    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
    } else {
        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
                 &new3aState.afTriggerId, frameNumber, cameraId);

        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
                 &new3aState.aeTriggerId, frameNumber, cameraId);
    }

    if (!gotAllStates) return BAD_VALUE;

    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
    }

    if (new3aState.afState != m3aState.afState ||
        new3aState.afMode != m3aState.afMode ||
        new3aState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, new3aState.afState,
                m3aState.afMode, new3aState.afMode,
                m3aState.afTriggerId, new3aState.afTriggerId);
        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
    }
    if (new3aState.awbState != m3aState.awbState ||
        new3aState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, new3aState.awbState,
                m3aState.awbMode, new3aState.awbMode);
        client->notifyAutoWhitebalance(new3aState.awbState,
                new3aState.aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}
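
// For reference, a minimal sketch of the get3aResult helper used above.
// This is an illustrative reconstruction, not the exact AOSP definition:
// it assumes the helper reads the first element of the tagged entry,
// logging and failing when the tag is absent. Src names the metadata
// storage type; T is deduced from the output pointer at each call site.
template<typename Src, typename T>
bool FrameProcessor::get3aResult(const CameraMetadata &result, int32_t tag,
        T *value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry = result.find(tag);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
                __FUNCTION__, cameraId,
                get_camera_metadata_tag_name(tag), frameNumber);
        return false;
    }
    // Read from the union member that matches the storage type.
    if (sizeof(Src) == sizeof(uint8_t)) {
        *value = static_cast<T>(entry.data.u8[0]);
    } else {
        *value = static_cast<T>(entry.data.i32[0]);
    }
    return true;
}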