void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) {
    ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result);

    sp<ProCameraListener> listener;
    {
        Mutex::Autolock _l(mLock);
        listener = mListener;
    }

    CameraMetadata tmp(result);

    // Unblock waitForFrame(id) callers
    {
        Mutex::Autolock al(mWaitMutex);
        mMetadataReady = true;
        mLatestMetadata = tmp; // make copy
        mWaitCondition.broadcast();
    }

    result = tmp.release();

    if (listener != NULL) {
        listener->onResultReceived(requestId, result);
    } else {
        free_camera_metadata(result);
    }
}
status_t MetadataQueue::freeBuffers(List<camera_metadata_t*>::iterator start,
                                    List<camera_metadata_t*>::iterator end) {
    while (start != end) {
        free_camera_metadata(*start);
        start = mStreamSlot.erase(start);
    }
    return OK;
}
void CameraMetadata::clear() {
    if (mLocked) {
        ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
        return;
    }
    if (mBuffer) {
        free_camera_metadata(mBuffer);
        mBuffer = NULL;
    }
}
CameraModule::~CameraModule() {
    while (mCameraInfoMap.size() > 0) {
        camera_info cameraInfo = mCameraInfoMap.editValueAt(0);
        if (cameraInfo.static_camera_characteristics != NULL) {
            free_camera_metadata(
                    const_cast<camera_metadata_t*>(cameraInfo.static_camera_characteristics));
        }
        mCameraInfoMap.removeItemsAt(0);
    }
}
camera_metadata_t* Metadata::generate() {
    pthread_mutex_lock(&mMutex);
    // Reuse if old generated metadata still valid
    if (!mDirty && mGenerated != NULL) {
        ALOGV("%s: Reusing generated metadata at %p", __func__, mGenerated);
        goto out;
    }
    // Destroy old metadata
    if (mGenerated != NULL) {
        ALOGV("%s: Freeing generated metadata at %p", __func__, mGenerated);
        free_camera_metadata(mGenerated);
        mGenerated = NULL;
    }
    // Generate new metadata structure
    ALOGV("%s: Generating new camera metadata structure, Entries:%d Data:%d",
            __func__, mEntryCount, mDataCount);
    mGenerated = allocate_camera_metadata(mEntryCount, mDataCount);
    if (mGenerated == NULL) {
        ALOGE("%s: Failed to allocate metadata (%d entries %d data)",
                __func__, mEntryCount, mDataCount);
        goto out;
    }
    // Walk list of entries adding each one to newly allocated metadata
    for (Entry *current = mHead; current != NULL; current = current->mNext) {
        int res = add_camera_metadata_entry(mGenerated, current->mTag,
                current->mData, current->mCount);
        if (res != 0) {
            ALOGE("%s: Failed to add camera metadata: %d", __func__, res);
            free_camera_metadata(mGenerated);
            mGenerated = NULL;
            goto out;
        }
    }

out:
    pthread_mutex_unlock(&mMutex);
    return mGenerated;
}
Metadata::~Metadata() {
    // Free the entry list, then the most recently generated buffer (if any)
    Entry *current = mHead;
    while (current != NULL) {
        Entry *tmp = current;
        current = current->mNext;
        delete tmp;
    }
    if (mGenerated != NULL)
        free_camera_metadata(mGenerated);
    pthread_mutex_destroy(&mMutex);
}
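// Ownership sketch (hypothetical helper, not part of the source): generate()
// keeps ownership of the buffer it returns, freeing it on the next regeneration
// or in ~Metadata(). Code that needs the result to outlive the Metadata object
// must clone it, as initStaticInfo() does later in this section with
// clone_camera_metadata(). The clone is then the caller's to release with
// free_camera_metadata().
camera_metadata_t* snapshotMetadata(Metadata &m) {
    const camera_metadata_t *owned = m.generate();  // still owned by 'm'
    if (owned == NULL) {
        return NULL;
    }
    // Caller must eventually call free_camera_metadata() on this copy.
    return clone_camera_metadata(owned);
}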
status_t MetadataQueue::setStreamSlot(camera_metadata_t *buf) {
    if (buf == NULL) {
        // Clearing the slot: release every queued buffer
        freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
        mStreamSlotCount = 0;
        return OK;
    }

    if (mStreamSlotCount > 1) {
        // Drop everything after the first buffer so at most one remains
        freeBuffers(++mStreamSlot.begin(), mStreamSlot.end());
        mStreamSlotCount = 1;
    }
    if (mStreamSlotCount == 1) {
        // Replace the single remaining buffer with the new one
        free_camera_metadata( *(mStreamSlot.begin()) );
        *(mStreamSlot.begin()) = buf;
    } else {
        mStreamSlot.push_front(buf);
        mStreamSlotCount = 1;
    }
    return OK;
}
status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) {
    if (mBuffer == NULL) {
        mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2);
        if (mBuffer == NULL) {
            ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
            return NO_MEMORY;
        }
    } else {
        size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer);
        size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer);
        size_t newEntryCount = currentEntryCount + extraEntries;
        newEntryCount = (newEntryCount > currentEntryCap) ?
                newEntryCount * 2 : currentEntryCap;

        size_t currentDataCount = get_camera_metadata_data_count(mBuffer);
        size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer);
        size_t newDataCount = currentDataCount + extraData;
        newDataCount = (newDataCount > currentDataCap) ?
                newDataCount * 2 : currentDataCap;

        if (newEntryCount > currentEntryCap || newDataCount > currentDataCap) {
            camera_metadata_t *oldBuffer = mBuffer;
            mBuffer = allocate_camera_metadata(newEntryCount, newDataCount);
            if (mBuffer == NULL) {
                ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__);
                return NO_MEMORY;
            }
            append_camera_metadata(mBuffer, oldBuffer);
            free_camera_metadata(oldBuffer);
        }
    }
    return OK;
}
void CameraMetadata::clear() {
    if (mBuffer) {
        free_camera_metadata(mBuffer);
        mBuffer = NULL;
    }
}
int UsbCamera::initStaticInfo() {
    /*
     * Setup static camera info. This will have to be customized per camera
     * device.
     * TODO: this is just some sample code, needs to be tailored for USB cameras.
     */
    if (mStaticInfo != NULL) {
        free_camera_metadata(mStaticInfo);
    }

    Metadata m;

    /* android.control */
    int32_t android_control_ae_available_target_fps_ranges[] = {30, 30};
    m.addInt32(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            ARRAY_SIZE(android_control_ae_available_target_fps_ranges),
            android_control_ae_available_target_fps_ranges);

    int32_t android_control_ae_compensation_range[] = {-4, 4};
    m.addInt32(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            ARRAY_SIZE(android_control_ae_compensation_range),
            android_control_ae_compensation_range);

    camera_metadata_rational_t android_control_ae_compensation_step[] = {{2, 1}};
    m.addRational(ANDROID_CONTROL_AE_COMPENSATION_STEP,
            ARRAY_SIZE(android_control_ae_compensation_step),
            android_control_ae_compensation_step);

    int32_t android_control_max_regions[] = {/*AE*/ 1, /*AWB*/ 1, /*AF*/ 1};
    m.addInt32(ANDROID_CONTROL_MAX_REGIONS,
            ARRAY_SIZE(android_control_max_regions),
            android_control_max_regions);

    /* android.jpeg */
    int32_t android_jpeg_available_thumbnail_sizes[] = {0, 0, 128, 96};
    m.addInt32(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
            ARRAY_SIZE(android_jpeg_available_thumbnail_sizes),
            android_jpeg_available_thumbnail_sizes);

    int32_t android_jpeg_max_size[] = {13 * 1024 * 1024}; // 13MB
    m.addInt32(ANDROID_JPEG_MAX_SIZE,
            ARRAY_SIZE(android_jpeg_max_size),
            android_jpeg_max_size);

    /* android.lens */
    float android_lens_info_available_focal_lengths[] = {1.0};
    m.addFloat(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            ARRAY_SIZE(android_lens_info_available_focal_lengths),
            android_lens_info_available_focal_lengths);

    /* android.request */
    int32_t android_request_max_num_output_streams[] = {0, 3, 1};
    m.addInt32(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            ARRAY_SIZE(android_request_max_num_output_streams),
            android_request_max_num_output_streams);

    /* android.scaler */
    int32_t android_scaler_available_formats[] = {
            HAL_PIXEL_FORMAT_RAW16,
            HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_RGBA_8888,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            // These are handled by YCbCr_420_888
            //   HAL_PIXEL_FORMAT_YV12,
            //   HAL_PIXEL_FORMAT_YCrCb_420_SP,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    m.addInt32(ANDROID_SCALER_AVAILABLE_FORMATS,
            ARRAY_SIZE(android_scaler_available_formats),
            android_scaler_available_formats);

    int64_t android_scaler_available_jpeg_min_durations[] = {1};
    m.addInt64(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
            ARRAY_SIZE(android_scaler_available_jpeg_min_durations),
            android_scaler_available_jpeg_min_durations);

    int32_t android_scaler_available_jpeg_sizes[] = {640, 480};
    m.addInt32(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
            ARRAY_SIZE(android_scaler_available_jpeg_sizes),
            android_scaler_available_jpeg_sizes);

    float android_scaler_available_max_digital_zoom[] = {1};
    m.addFloat(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            ARRAY_SIZE(android_scaler_available_max_digital_zoom),
            android_scaler_available_max_digital_zoom);

    int64_t android_scaler_available_processed_min_durations[] = {1};
    m.addInt64(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
            ARRAY_SIZE(android_scaler_available_processed_min_durations),
            android_scaler_available_processed_min_durations);

    int32_t android_scaler_available_processed_sizes[] = {640, 480};
    m.addInt32(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            ARRAY_SIZE(android_scaler_available_processed_sizes),
            android_scaler_available_processed_sizes);

    int64_t android_scaler_available_raw_min_durations[] = {1};
    m.addInt64(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
            ARRAY_SIZE(android_scaler_available_raw_min_durations),
            android_scaler_available_raw_min_durations);

    int32_t android_scaler_available_raw_sizes[] = {640, 480};
    m.addInt32(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            ARRAY_SIZE(android_scaler_available_raw_sizes),
            android_scaler_available_raw_sizes);

    /* android.sensor */
    int32_t android_sensor_info_active_array_size[] = {0, 0, 640, 480};
    m.addInt32(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            ARRAY_SIZE(android_sensor_info_active_array_size),
            android_sensor_info_active_array_size);

    int32_t android_sensor_info_sensitivity_range[] = {100, 1600};
    m.addInt32(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
            ARRAY_SIZE(android_sensor_info_sensitivity_range),
            android_sensor_info_sensitivity_range);

    int64_t android_sensor_info_max_frame_duration[] = {30000000000};
    m.addInt64(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            ARRAY_SIZE(android_sensor_info_max_frame_duration),
            android_sensor_info_max_frame_duration);

    float android_sensor_info_physical_size[] = {3.2, 2.4};
    m.addFloat(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            ARRAY_SIZE(android_sensor_info_physical_size),
            android_sensor_info_physical_size);

    int32_t android_sensor_info_pixel_array_size[] = {640, 480};
    m.addInt32(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            ARRAY_SIZE(android_sensor_info_pixel_array_size),
            android_sensor_info_pixel_array_size);

    int32_t android_sensor_orientation[] = {0};
    m.addInt32(ANDROID_SENSOR_ORIENTATION,
            ARRAY_SIZE(android_sensor_orientation),
            android_sensor_orientation);

    /* End of static camera characteristics */

    mStaticInfo = clone_camera_metadata(m.get());

    return 0;
}
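// Cleanup sketch (hypothetical; the source above only shows where mStaticInfo
// is re-initialized, not where it is finally released): initStaticInfo() clones
// the generated metadata into mStaticInfo because the local Metadata object frees
// its own buffer when it goes out of scope. Whoever owns mStaticInfo must
// therefore release the clone with free_camera_metadata(), e.g. in a destructor
// along these lines.
UsbCamera::~UsbCamera() {
    if (mStaticInfo != NULL) {
        free_camera_metadata(mStaticInfo);
        mStaticInfo = NULL;
    }
}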
status_t CameraMetadata::readFromParcel(const Parcel& data,
                                        camera_metadata_t** out) {
    status_t err = OK;
    camera_metadata_t* metadata = NULL;

    if (out) {
        *out = NULL;
    }

    // See CameraMetadata::writeToParcel for parcel data layout diagram and explanation.
    // arg0 = blobSize (int32)
    int32_t blobSizeTmp = -1;
    if ((err = data.readInt32(&blobSizeTmp)) != OK) {
        ALOGE("%s: Failed to read metadata size (error %d %s)",
              __FUNCTION__, err, strerror(-err));
        return err;
    }
    const size_t blobSize = static_cast<size_t>(blobSizeTmp);
    const size_t alignment = get_camera_metadata_alignment();

    // Special case: zero blob size means zero sized (NULL) metadata.
    if (blobSize == 0) {
        ALOGV("%s: Read 0-sized metadata", __FUNCTION__);
        return OK;
    }

    if (blobSize <= alignment) {
        ALOGE("%s: metadata blob is malformed, blobSize(%zu) should be larger than alignment(%zu)",
              __FUNCTION__, blobSize, alignment);
        return BAD_VALUE;
    }

    const size_t metadataSize = blobSize - alignment;

    // NOTE: this doesn't make sense to me. shouldn't the blob
    // know how big it is? why do we have to specify the size
    // to Parcel::readBlob ?
    ReadableBlob blob;
    // arg1 = metadata (blob)
    do {
        if ((err = data.readBlob(blobSize, &blob)) != OK) {
            ALOGE("%s: Failed to read metadata blob (sized %zu). Possible "
                  "serialization bug. Error %d %s",
                  __FUNCTION__, blobSize, err, strerror(-err));
            break;
        }

        // arg2 = offset (blob)
        // Must be after blob since we don't know offset until after writeBlob.
        int32_t offsetTmp;
        if ((err = data.readInt32(&offsetTmp)) != OK) {
            ALOGE("%s: Failed to read metadata offsetTmp (error %d %s)",
                  __FUNCTION__, err, strerror(-err));
            break;
        }
        const size_t offset = static_cast<size_t>(offsetTmp);
        if (offset >= alignment) {
            ALOGE("%s: metadata offset(%zu) should be less than alignment(%zu)",
                  __FUNCTION__, offset, alignment);
            err = BAD_VALUE;
            break;
        }

        const uintptr_t metadataStart =
                reinterpret_cast<uintptr_t>(blob.data()) + offset;
        const camera_metadata_t* tmp =
                reinterpret_cast<const camera_metadata_t*>(metadataStart);
        ALOGV("%s: alignment is: %zu, metadata start: %p, offset: %zu",
              __FUNCTION__, alignment, tmp, offset);
        metadata = allocate_copy_camera_metadata_checked(tmp, metadataSize);
        if (metadata == NULL) {
            // We consider that allocation only fails if the validation
            // also failed, therefore the readFromParcel was a failure.
            ALOGE("%s: metadata allocation and copy failed", __FUNCTION__);
            err = BAD_VALUE;
        }
    } while (0);
    blob.release();

    if (out) {
        ALOGV("%s: Set out metadata to %p", __FUNCTION__, metadata);
        *out = metadata;
    } else if (metadata != NULL) {
        ALOGV("%s: Freed camera metadata at %p", __FUNCTION__, metadata);
        free_camera_metadata(metadata);
    }

    return err;
}
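// Caller sketch (hypothetical function; assumes this readFromParcel() overload
// is invocable as shown): when 'out' is non-NULL, the function transfers
// ownership of the deserialized buffer to the caller, which must release it
// with free_camera_metadata(). Passing out == NULL makes readFromParcel() free
// its own copy, as the tail of the function above shows.
status_t readAndDiscardMetadata(const Parcel& data) {
    camera_metadata_t* metadata = NULL;
    status_t err = CameraMetadata::readFromParcel(data, &metadata);
    if (err != OK) {
        return err;
    }
    if (metadata != NULL) {
        // ... inspect entries here ...
        free_camera_metadata(metadata);  // caller owns the copy
    }
    return OK;
}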
int MetadataQueue::producer_cancel(const camera2_frame_queue_dst_ops_t *q,
        camera_metadata_t *old_buffer) {
    free_camera_metadata(old_buffer);
    return OK;
}
int MetadataQueue::consumer_free(const camera2_request_queue_src_ops_t *q,
        camera_metadata_t *old_buffer) {
    free_camera_metadata(old_buffer);
    return OK;
}