Example No. 1
/**
 * Find a compatible camera mode:
 *    1) has the same aspect ratio as the native display window (which should
 *       be a rotated version of the physical device's resolution)
 *    2) has the smallest resolution in the camera mode list
 * This minimizes the color space conversion workload done later.
 */
bool NDKCamera::MatchCaptureSizeRequest(ANativeWindow* display,
                                        ImageFormat* resView,
                                        ImageFormat* resCap) {
  DisplayDimension disp(ANativeWindow_getWidth(display),
                        ANativeWindow_getHeight(display));
  if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
    disp.Flip();
  }

  ACameraMetadata* metadata;
  CALL_MGR(
      getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(), &metadata));
  ACameraMetadata_const_entry entry;
  CALL_METADATA(getConstEntry(
      metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry));
  // format of the data: format, width, height, input?, type int32
  bool foundIt = false;
  DisplayDimension foundRes(4000, 4000);
  DisplayDimension maxJPG(0, 0);

  for (int i = 0; i < entry.count; i += 4) {
    int32_t input = entry.data.i32[i + 3];
    int32_t format = entry.data.i32[i + 0];
    if (input) continue;

    if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
      DisplayDimension res(entry.data.i32[i + 1],
                           entry.data.i32[i + 2]);
      if (!disp.IsSameRatio(res)) continue;
      if (format == AIMAGE_FORMAT_YUV_420_888 && foundRes > res) {
        foundIt = true;
        foundRes = res;
      } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
        maxJPG = res;
      }
    }
  }

  if (foundIt) {
    resView->width = foundRes.org_width();
    resView->height = foundRes.org_height();
    resCap->width = maxJPG.org_width();
    resCap->height = maxJPG.org_height();
  } else {
    LOGW("Did not find any compatible camera resolution, taking 640x480");
    if (disp.IsPortrait()) {
      resView->width = 480;
      resView->height = 640;
    } else {
      resView->width = 640;
      resView->height = 480;
    }
    *resCap = *resView;
  }
  resView->format = AIMAGE_FORMAT_YUV_420_888;
  resCap->format = AIMAGE_FORMAT_JPEG;
  return foundIt;
}
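A minimal usage sketch for the function above, assuming the sample's ImageFormat struct (width/height/format fields) and an existing NDKCamera instance; CreateReaders() and the maxImages counts are illustrative choices, not part of the original sample:

#include <android/native_window.h>
#include <media/NdkImageReader.h>

// Hypothetical helper: match sizes against the display window, then back the
// preview and still-capture paths with AImageReader instances of those sizes.
bool CreateReaders(NDKCamera* camera, ANativeWindow* displayWindow,
                   AImageReader** previewReader, AImageReader** captureReader) {
  ImageFormat view{}, capture{};
  camera->MatchCaptureSizeRequest(displayWindow, &view, &capture);

  // Preview frames arrive as YUV_420_888 at the matched view resolution.
  media_status_t status = AImageReader_new(view.width, view.height, view.format,
                                           /*maxImages=*/4, previewReader);
  if (status != AMEDIA_OK) return false;

  // Still captures use the largest same-ratio JPEG size that was found.
  status = AImageReader_new(capture.width, capture.height, capture.format,
                            /*maxImages=*/2, captureReader);
  return status == AMEDIA_OK;
}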
Example No. 2
/**
 * EnumerateCamera()
 *     Loop through the cameras on the system and pick
 *     1) the back-facing camera if available
 *     2) otherwise the first camera reported to us
 */
void NDKCamera::EnumerateCamera() {
  ACameraIdList* cameraIds = nullptr;
  CALL_MGR(getCameraIdList(cameraMgr_, &cameraIds));

  for (int i = 0; i < cameraIds->numCameras; ++i) {
    const char* id = cameraIds->cameraIds[i];

    ACameraMetadata* metadataObj;
    CALL_MGR(getCameraCharacteristics(cameraMgr_, id, &metadataObj));

    int32_t count = 0;
    const uint32_t* tags = nullptr;
    ACameraMetadata_getAllTags(metadataObj, &count, &tags);
    for (int tagIdx = 0; tagIdx < count; ++tagIdx) {
      if (ACAMERA_LENS_FACING == tags[tagIdx]) {
        ACameraMetadata_const_entry lensInfo = {
            0,
        };
        CALL_METADATA(getConstEntry(metadataObj, tags[tagIdx], &lensInfo));
        CameraId cam(id);
        cam.facing_ = static_cast<acamera_metadata_enum_android_lens_facing_t>(
            lensInfo.data.u8[0]);
        cam.owner_ = false;
        cam.device_ = nullptr;
        cameras_[cam.id_] = cam;
        if (cam.facing_ == ACAMERA_LENS_FACING_BACK) {
          activeCameraId_ = cam.id_;
        }
        break;
      }
    }
    ACameraMetadata_free(metadataObj);
  }

  ASSERT(cameras_.size(), "No Camera Available on the device");
  if (activeCameraId_.length() == 0) {
    // if no back facing camera found, pick up the first one to use...
    activeCameraId_ = cameras_.begin()->second.id_;
  }
  ACameraManager_deleteCameraIdList(cameraIds);
}
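The same selection logic can be sketched standalone against the NDK camera manager API, querying ACAMERA_LENS_FACING directly instead of walking the full tag list; PickBackFacingCamera() is an illustrative name and the simplified error handling is an assumption:

#include <string>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

// Return the id of the back-facing camera, or the first camera reported by
// the manager if none faces back (empty string if enumeration fails).
static std::string PickBackFacingCamera(ACameraManager* mgr) {
  ACameraIdList* ids = nullptr;
  if (ACameraManager_getCameraIdList(mgr, &ids) != ACAMERA_OK || !ids ||
      ids->numCameras < 1) {
    return "";
  }
  std::string chosen = ids->cameraIds[0];  // fallback: first reported camera
  for (int i = 0; i < ids->numCameras; ++i) {
    ACameraMetadata* meta = nullptr;
    if (ACameraManager_getCameraCharacteristics(mgr, ids->cameraIds[i],
                                                &meta) != ACAMERA_OK) {
      continue;
    }
    ACameraMetadata_const_entry facing = {0};
    if (ACameraMetadata_getConstEntry(meta, ACAMERA_LENS_FACING, &facing) ==
            ACAMERA_OK &&
        facing.data.u8[0] == ACAMERA_LENS_FACING_BACK) {
      chosen = ids->cameraIds[i];
      ACameraMetadata_free(meta);
      break;
    }
    ACameraMetadata_free(meta);
  }
  ACameraManager_deleteCameraIdList(ids);
  return chosen;
}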
Example No. 3
/**
 * GetSensorOrientation()
 *     Retrieve the current sensor orientation relative to the phone's
 *     device orientation.
 *     SensorOrientation is NOT settable.
 */
bool NDKCamera::GetSensorOrientation(int32_t* facing, int32_t* angle) {
  if (!cameraMgr_) {
    return false;
  }

  ACameraMetadata* metadataObj;
  ACameraMetadata_const_entry face, orientation;
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  CALL_METADATA(getConstEntry(metadataObj, ACAMERA_LENS_FACING, &face));
  cameraFacing_ = static_cast<int32_t>(face.data.u8[0]);

  CALL_METADATA(
      getConstEntry(metadataObj, ACAMERA_SENSOR_ORIENTATION, &orientation));

  LOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);

  ACameraMetadata_free(metadataObj);
  cameraOrientation_ = orientation.data.i32[0];

  if (facing) *facing = cameraFacing_;
  if (angle) *angle = cameraOrientation_;
  return true;
}
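The facing and angle returned here are typically combined with the current device rotation to orient a still capture. The sketch below follows the formula documented for camera2's SENSOR_ORIENTATION; the function name is illustrative:

#include <cstdint>
#include <camera/NdkCameraMetadataTags.h>

// Combine the sensor orientation with the device rotation (in degrees) to get
// the JPEG orientation that makes the captured image upright.
static int32_t ComputeJpegOrientation(int32_t facing, int32_t sensorOrientation,
                                      int32_t deviceOrientationDeg) {
  // Round the device orientation to a multiple of 90 degrees.
  deviceOrientationDeg = ((deviceOrientationDeg + 45) / 90) * 90;
  // Front-facing sensors are mirrored, so the rotation direction is reversed.
  if (facing == ACAMERA_LENS_FACING_FRONT) {
    deviceOrientationDeg = -deviceOrientationDeg;
  }
  return (sensorOrientation + deviceOrientationDeg + 360) % 360;
}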
Example No. 4
NDKCamera::NDKCamera()
    : cameraMgr_(nullptr),
      activeCameraId_(""),
      outputContainer_(nullptr),
      captureSessionState_(CaptureSessionState::MAX_STATE),
      cameraFacing_(ACAMERA_LENS_FACING_BACK),
      cameraOrientation_(0),
      exposureTime_(static_cast<int64_t>(0)) {
  valid_ = false;
  requests_.resize(CAPTURE_REQUEST_COUNT);
  memset(requests_.data(), 0, requests_.size() * sizeof(requests_[0]));
  cameras_.clear();
  cameraMgr_ = ACameraManager_create();
  ASSERT(cameraMgr_, "Failed to create cameraManager");

  // Pick up a back-facing camera to preview
  EnumerateCamera();
  ASSERT(activeCameraId_.size(), "Unknown ActiveCameraIdx");

  // Create back facing camera device
  CALL_MGR(openCamera(cameraMgr_, activeCameraId_.c_str(), GetDeviceListener(),
                      &cameras_[activeCameraId_].device_));

  CALL_MGR(registerAvailabilityCallback(cameraMgr_, GetManagerListener()));

  // Initialize camera controls (exposure time and sensitivity); pick
  // min + 2% of the range as the starting value (just a number, no magic).
  ACameraMetadata* metadataObj;
  CALL_MGR(getCameraCharacteristics(cameraMgr_, activeCameraId_.c_str(),
                                    &metadataObj));
  ACameraMetadata_const_entry val = {
      0,
  };
  camera_status_t status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE, &val);
  if (status == ACAMERA_OK) {
    exposureRange_.min_ = val.data.i64[0];
    if (exposureRange_.min_ < kMinExposureTime) {
      exposureRange_.min_ = kMinExposureTime;
    }
    exposureRange_.max_ = val.data.i64[1];
    if (exposureRange_.max_ > kMaxExposureTime) {
      exposureRange_.max_ = kMaxExposureTime;
    }
    exposureTime_ = exposureRange_.value(2);
  } else {
    LOGW("Unsupported ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE");
    exposureRange_.min_ = exposureRange_.max_ = 0L;
    exposureTime_ = 0L;
  }
  status = ACameraMetadata_getConstEntry(
      metadataObj, ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, &val);

  if (status == ACAMERA_OK) {
    sensitivityRange_.min_ = val.data.i32[0];
    sensitivityRange_.max_ = val.data.i32[1];

    sensitivity_ = sensitivityRange_.value(2);
  } else {
    LOGW("failed for ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE");
    sensitivityRange_.min_ = sensitivityRange_.max_ = 0;
    sensitivity_ = 0;
  }
  valid_ = true;
}
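The exposureRange_.value(2) and sensitivityRange_.value(2) calls above imply a small range helper. The shape below is inferred from that usage (min plus a percentage of the span) and is an assumption, not taken from the sample's headers:

// Assumed range helper: value(percent) returns min plus percent% of the span,
// so value(2) matches the "min + 2% of the range" starting value noted above.
template <typename T>
struct RangeValue {
  T min_, max_;
  T value(int percent) const {
    return static_cast<T>(min_ + (max_ - min_) * percent / 100);
  }
};
// Under this assumption, exposureRange_ would be a RangeValue<int64_t> and
// sensitivityRange_ a RangeValue<int32_t>.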
Example No. 5
status_t BnCameraService::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case GET_NUMBER_OF_CAMERAS: {
            CHECK_INTERFACE(ICameraService, data, reply);
            reply->writeNoException();
            reply->writeInt32(getNumberOfCameras());
            return NO_ERROR;
        } break;
        case GET_CAMERA_INFO: {
            CHECK_INTERFACE(ICameraService, data, reply);
            CameraInfo cameraInfo = CameraInfo();
            memset(&cameraInfo, 0, sizeof(cameraInfo));
            status_t result = getCameraInfo(data.readInt32(), &cameraInfo);
            reply->writeNoException();
            reply->writeInt32(result);

            // Fake a parcelable object here
            reply->writeInt32(1); // means the parcelable is included
            reply->writeInt32(cameraInfo.facing);
            reply->writeInt32(cameraInfo.orientation);
            return NO_ERROR;
        } break;
        case GET_CAMERA_CHARACTERISTICS: {
            CHECK_INTERFACE(ICameraService, data, reply);
            CameraMetadata info;
            status_t result = getCameraCharacteristics(data.readInt32(), &info);
            reply->writeNoException();
            reply->writeInt32(result);

            // out-variables are after exception and return value
            reply->writeInt32(1); // means the parcelable is included
            info.writeToParcel(reply);
            return NO_ERROR;
        } break;
        case CONNECT: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraClient> cameraClient =
                    interface_cast<ICameraClient>(data.readStrongBinder());
            int32_t cameraId = data.readInt32();
            const String16 clientName = data.readString16();
            int32_t clientUid = data.readInt32();
            sp<ICamera> camera;
            status_t status = connect(cameraClient, cameraId,
                    clientName, clientUid, /*out*/ camera);
            reply->writeNoException();
            reply->writeInt32(status);
            if (camera != NULL) {
                reply->writeInt32(1);
                reply->writeStrongBinder(camera->asBinder());
            } else {
                reply->writeInt32(0);
            }
            return NO_ERROR;
        } break;
        case CONNECT_PRO: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<IProCameraCallbacks> cameraClient =
                interface_cast<IProCameraCallbacks>(data.readStrongBinder());
            int32_t cameraId = data.readInt32();
            const String16 clientName = data.readString16();
            int32_t clientUid = data.readInt32();
            sp<IProCameraUser> camera;
            status_t status = connectPro(cameraClient, cameraId,
                    clientName, clientUid, /*out*/ camera);
            reply->writeNoException();
            reply->writeInt32(status);
            if (camera != NULL) {
                reply->writeInt32(1);
                reply->writeStrongBinder(camera->asBinder());
            } else {
                reply->writeInt32(0);
            }
            return NO_ERROR;
        } break;
        case CONNECT_DEVICE: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraDeviceCallbacks> cameraClient =
                interface_cast<ICameraDeviceCallbacks>(data.readStrongBinder());
            int32_t cameraId = data.readInt32();
            const String16 clientName = data.readString16();
            int32_t clientUid = data.readInt32();
            sp<ICameraDeviceUser> camera;
            status_t status = connectDevice(cameraClient, cameraId,
                    clientName, clientUid, /*out*/ camera);
            reply->writeNoException();
            reply->writeInt32(status);
            if (camera != NULL) {
                reply->writeInt32(1);
                reply->writeStrongBinder(camera->asBinder());
            } else {
                reply->writeInt32(0);
            }
            return NO_ERROR;
        } break;
        case ADD_LISTENER: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraServiceListener> listener =
                interface_cast<ICameraServiceListener>(data.readStrongBinder());
            reply->writeNoException();
            reply->writeInt32(addListener(listener));
            return NO_ERROR;
        } break;
        case REMOVE_LISTENER: {
            CHECK_INTERFACE(ICameraService, data, reply);
            sp<ICameraServiceListener> listener =
                interface_cast<ICameraServiceListener>(data.readStrongBinder());
            reply->writeNoException();
            reply->writeInt32(removeListener(listener));
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
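For context, a hedged sketch of the proxy side that this dispatcher decodes: within the usual BpCameraService : public BpInterface<ICameraService> pattern, a simple query such as GET_NUMBER_OF_CAMERAS writes the interface token first, then reads the reply in the same order the stub wrote it (exception marker, then return value). Treat this as illustrative rather than the exact AOSP implementation:

    virtual int32_t getNumberOfCameras() {
        Parcel data, reply;
        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
        remote()->transact(GET_NUMBER_OF_CAMERAS, data, &reply);
        if (reply.readExceptionCode() != 0) {
            return 0;  // the service threw an exception; no payload follows
        }
        return reply.readInt32();  // value written by writeInt32() in the stub
    }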