// Example #1
// 0
// Worker loop: pulls camera samples, converts them to I420, keeps a
// full-resolution copy plus a downscaled copy for the tracker, until
// stopped or MAX_TRACKER_FRAMES frames have been collected.
void PanoramaTracker::run() {
  while (isRunning() && m_scaled.size() <= MAX_TRACKER_FRAMES) {
    QScopedPointer<QtCamGstSample> sample(m_input->sample());

    if (!sample) {
      continue;
    }

    // Lazily initialize the tracker from the first sample's dimensions.
    if (!Tracker::isInitialized()) {
      QSize size = QSize(sample->width(), sample->height());
      // BUG FIX: these previously declared new locals ("int m_width = ..."),
      // shadowing the class members. The locals went out of scope at the end
      // of this if-block, so the member m_width/m_height read below when
      // sizing and striding the scaled buffer were never assigned. Assign the
      // members instead.
      m_width = size.width() > 720 ? size.width() / 8 : size.width() / 4;
      m_height = size.width() > 720 ? size.height() / 8 : size.height() / 4;
      m_inputSize = size;

      // TODO: This should be 5.0 but we fail to stitch sometimes if we set it to 5
      if (!Tracker::initialize(m_width, m_height, 2.0f)) {
	emit error(Panorama::ErrorTrackerInit);
	return;
      }
    }

    // Now we can process the sample:
    const guint8 *src = sample->data();

    // Full-resolution I420 destination: Y plane (w*h) followed by
    // quarter-size U and V planes (total w*h*3/2 bytes).
    QScopedArrayPointer<guint8>
      dst(new guint8[m_inputSize.width() * m_inputSize.height() * 3 / 2]);
    enum libyuv::FourCC fmt;

    switch (sample->format()) {
    case GST_VIDEO_FORMAT_UYVY:
      fmt = libyuv::FOURCC_UYVY;
      break;
    default:
      qCritical() << "Unsupported color format";
      emit error(Panorama::ErrorTrackerFormat);
      return;
    }

    guint8 *y = dst.data(),
      *u = y + m_inputSize.width() * m_inputSize.height(),
      *v = u + m_inputSize.width()/2 * m_inputSize.height()/2;

    if (ConvertToI420(src, sample->size(),
		      y, m_inputSize.width(),
		      u, m_inputSize.width() / 2,
		      v, m_inputSize.width() / 2,
		      0, 0,
		      m_inputSize.width(), m_inputSize.height(),
		      m_inputSize.width(), m_inputSize.height(),
		      libyuv::kRotate0, fmt) != 0) {
      emit error(Panorama::ErrorTrackerConvert);
      return;
    }

    // Downscaled I420 buffer fed to the tracker.
    QScopedArrayPointer<guint8> scaled(new guint8[m_width * m_height * 3 / 2]);
    guint8 *ys = scaled.data(),
      *us = ys + m_width * m_height,
      *vs = us + m_width/2 * m_height/2;

    // Now scale:
    // No need for error checking because the function always returns 0
    libyuv::I420Scale(y, m_inputSize.width(),
		      u, m_inputSize.width()/2,
		      v, m_inputSize.width()/2,
		      m_inputSize.width(), m_inputSize.height(),
		      ys, m_width,
		      us, m_width/2,
		      vs, m_width/2,
		      m_width, m_height,
		      libyuv::kFilterBilinear);

    int err = addFrame(scaled.data());

    if (err >= 0) {
      // Frame accepted by the tracker: transfer buffer ownership to the
      // member vectors (take() releases the scoped pointers).
      m_scaled.push_back(scaled.take());
      m_frames.push_back(dst.take());
      emit frameCountChanged();
    }
  }
}
/*
 * JNI entry point: converts one NV21 camera frame to I420 (with crop,
 * rotation and front-camera mirroring), scales it to the destination size,
 * captures a one-time thumbnail, and feeds the frame to the OpenH264
 * encoder. Returns JNI_TRUE on success, JNI_FALSE on any failure.
 */
JNIEXPORT jboolean JNICALL Java_co_splots_recorder_H264Encoder_encode(
		JNIEnv* env, jobject thiz, jbyteArray data, jobject cameraInfo,
		jlong timeStamp) {
	/* Read orientation/facing from the Camera.CameraInfo object. */
	jclass camera_info_class = (*env)->GetObjectClass(env, cameraInfo);
	jfieldID field_id = (*env)->GetFieldID(env, camera_info_class,
			"orientation", "I");
	jint rotation = (*env)->GetIntField(env, cameraInfo, field_id);
	field_id = (*env)->GetFieldID(env, camera_info_class, "facing", "I");
	jint facing = (*env)->GetIntField(env, cameraInfo, field_id);
	int nv21_length = (*env)->GetArrayLength(env, data);
	uint8_t *nv21_input = (uint8_t *) (*env)->GetByteArrayElements(env, data,
			(jboolean) 0);

	int width = crop_width;
	int height = crop_height;
	int rotate = rotation;

	int converted_half_size = (width + 1) / 2;
	uint8_t convert_i420[width * height + (width * height + 1) / 2];
	uint8_t *convert_i420_y = convert_i420;
	uint8_t *convert_i420_u = convert_i420 + (width * height);
	uint8_t *convert_i420_v = convert_i420_u
			+ (converted_half_size * converted_half_size);

	if (facing == 1) {
		/* Front camera: a negative src-crop height makes ConvertToI420
		 * flip vertically; together with the extra 180° rotation this
		 * un-mirrors the image. */
		height = 0 - height;
		rotate = (rotate + 180) % 360;
	}
	/* NOTE: the U and V destination pointers are intentionally swapped
	 * here (NV21 carries chroma in VU order); the memcpy()s below swap
	 * them back so `frame` ends up as proper I420. */
	int convert_failed = ConvertToI420(nv21_input, nv21_length,
			convert_i420_y, width, convert_i420_v, converted_half_size,
			convert_i420_u, converted_half_size, crop_x, crop_y, src_width,
			src_height, width, height, rotate, FOURCC_NV21) != 0;
	/* BUG FIX: the pinned Java array was never released, pinning/leaking
	 * it on every call and every early return. The input pixels are not
	 * needed past this point; JNI_ABORT skips the useless copy-back. */
	(*env)->ReleaseByteArrayElements(env, data, (jbyte *) nv21_input,
			JNI_ABORT);
	if (convert_failed)
		return JNI_FALSE;
	/* BUG FIX: restore a positive height after conversion. Leaving it
	 * negative made the size comparison below always request scaling and
	 * asked I420Scale to flip the already-flipped frame a second time. */
	if (height < 0)
		height = 0 - height;
	int scaled_half_size = (dest_width + 1) / 2;
	int scaled_data_length = dest_width * dest_height
			+ (dest_width * dest_height + 1) / 2;
	uint8_t scaled_i420[scaled_data_length];
	uint8_t* scaled_i420_y = scaled_i420;
	uint8_t* scaled_i420_u = scaled_i420 + (dest_width * dest_height);
	uint8_t* scaled_i420_v = scaled_i420_u
			+ (scaled_half_size * scaled_half_size);
	if (width != dest_width || height != dest_height) {
		if (I420Scale(convert_i420_y, width, convert_i420_u,
				converted_half_size, convert_i420_v, converted_half_size, width,
				height, scaled_i420_y, dest_width, scaled_i420_u,
				scaled_half_size, scaled_i420_v, scaled_half_size, dest_width,
				dest_height, kFilterNone) != 0)
			return JNI_FALSE;
	} else {
		/* Same size: alias the converted planes instead of copying. */
		scaled_i420_y = convert_i420_y;
		scaled_i420_u = convert_i420_u;
		scaled_i420_v = convert_i420_v;
	}
	/* Assemble the encoder input; the U/V swap here undoes the swap done
	 * at conversion time (see NOTE above), yielding standard I420. */
	uint8_t *frame = (uint8_t *) malloc(sizeof(uint8_t) * scaled_data_length);
	if (frame == NULL)
		return JNI_FALSE;
	uint8_t *frame_y = frame;
	uint8_t *frame_u = frame_y + (dest_width * dest_height);
	uint8_t *frame_v = frame_u + (scaled_half_size * scaled_half_size);
	memcpy(frame_y, scaled_i420_y, dest_width * dest_height);
	memcpy(frame_u, scaled_i420_v, scaled_half_size * scaled_half_size);
	memcpy(frame_v, scaled_i420_u, scaled_half_size * scaled_half_size);
	if (thumbnail == NULL) {
		/* First frame only: keep a thumbnail. Feeding the swapped
		 * planes to I420ToNV12 produces VU-interleaved chroma, hence
		 * the `thumbnail_vu` name (NV21-style layout). */
		thumbnail_size = scaled_data_length;
		thumbnail_width = dest_width;
		thumbnail_height = dest_height;
		thumbnail = (uint8_t *) malloc(sizeof(uint8_t) * thumbnail_size);
		uint8_t* thumbnail_y = thumbnail;
		uint8_t* thumbnail_vu = thumbnail + (dest_width * dest_height);
		I420ToNV12(scaled_i420_y, dest_width, scaled_i420_u, scaled_half_size,
				scaled_i420_v, scaled_half_size, thumbnail_y, dest_width,
				thumbnail_vu, dest_width, dest_width, dest_height);
	}
	SSourcePicture pic;
	memset(&pic, 0, sizeof(SSourcePicture));
	pic.iPicWidth = dest_width;
	/* BUG FIX: was "pic.iPicHeight = dest_width" — encoded with a wrong
	 * picture height whenever dest_width != dest_height. */
	pic.iPicHeight = dest_height;
	pic.iColorFormat = videoFormatI420;
	pic.iStride[0] = dest_width;
	pic.iStride[1] = pic.iStride[2] = scaled_half_size;
	pic.uiTimeStamp = (long long) timeStamp;
	pic.pData[0] = frame;
	pic.pData[1] = pic.pData[0] + dest_width * dest_height;
	pic.pData[2] = pic.pData[1] + (scaled_half_size * scaled_half_size);

	float current_frame_rate = ((float) frame_count * 1000)
			/ ((float) timeStamp);
	frame_rate_sum += current_frame_rate;
	LOG("current fps: %f", current_frame_rate);
	/*if ((*encoder)->SetOption(encoder, ENCODER_OPTION_FRAME_RATE,
	 (void*) &current_frame_rate) != cmResultSuccess)
	 LOG("Could not update frame rate.");*/

	EVideoFrameType frameType = (*encoder)->EncodeFrame(encoder, &pic, &info);
	/* BUG FIX: `frame` was leaked on every call. The encoder consumes the
	 * input pixels during EncodeFrame (the bitstream in `info` lives in
	 * encoder-owned buffers), so freeing here is safe. */
	free(frame);
	if (frameType == videoFrameTypeInvalid)
		return JNI_FALSE;
	LOG("Frame #%d", frame_count);
	frame_count++;
	if (frameType != videoFrameTypeSkip)
		writeInfoToFile(&info);
	return JNI_TRUE;
}