Beispiel #1
0
/*
 * Class:     edu_wpi_first_wpilibj_hal_EncoderJNI
 * Method:    initializeEncoder
 * Signature: (BIBBIBBLjava/nio/IntBuffer;Ljava/nio/IntBuffer;)Ljava/nio/ByteBuffer;
 *
 * Creates a HAL encoder from two digital sources and returns the opaque HAL
 * handle to Java wrapped in a direct ByteBuffer. The FPGA index is written
 * through the caller-supplied direct IntBuffer `index`, and the HAL status
 * code through the direct IntBuffer `status`.
 */
JNIEXPORT jobject JNICALL Java_edu_wpi_first_wpilibj_hal_EncoderJNI_initializeEncoder
(JNIEnv * env, jclass, jbyte port_a_module, jint port_a_pin, jbyte port_a_analog_trigger, jbyte port_b_module, jint port_b_pin, jbyte port_b_analog_trigger, jbyte reverseDirection, jobject index, jobject status)
{
    ENCODERJNI_LOG(logDEBUG) << "Calling ENCODERJNI initializeEncoder";
    ENCODERJNI_LOG(logDEBUG) << "Module A = " << (jint)port_a_module;
    ENCODERJNI_LOG(logDEBUG) << "Pin A = " << port_a_pin;
    ENCODERJNI_LOG(logDEBUG) << "Analog Trigger A = " << (jint)port_a_analog_trigger;
    ENCODERJNI_LOG(logDEBUG) << "Module B = " << (jint)port_b_module;
    ENCODERJNI_LOG(logDEBUG) << "Pin B = " << port_b_pin;
    ENCODERJNI_LOG(logDEBUG) << "Analog Trigger B = " << (jint)port_b_analog_trigger;
    ENCODERJNI_LOG(logDEBUG) << "Reverse direction = " << (jint)reverseDirection;
    // NOTE: GetDirectBufferAddress returns NULL if the Java side passed a
    // non-direct buffer; the original code did not check either, so callers
    // are assumed to always pass direct buffers — TODO confirm on Java side.
    jint * indexPtr = (jint*)env->GetDirectBufferAddress(index);
    ENCODERJNI_LOG(logDEBUG) << "Index Ptr = " << indexPtr;
    jint * statusPtr = (jint*)env->GetDirectBufferAddress(status);
    ENCODERJNI_LOG(logDEBUG) << "Status Ptr = " << statusPtr;
    // BUG FIX: the original allocated only 4 bytes (new unsigned char[4]) to
    // hold a void*, which overflows the allocation on 64-bit targets where
    // pointers are 8 bytes. Allocate exactly one pointer-sized slot instead.
    // Ownership of this allocation passes to the returned direct ByteBuffer;
    // it is presumably released by the matching free/cleanup JNI entry point
    // elsewhere in this file — TODO confirm.
    void** encoderPtr = new void*;
    *encoderPtr = initializeEncoder(port_a_module, port_a_pin, port_a_analog_trigger,
                                    port_b_module, port_b_pin, port_b_analog_trigger,
                                    reverseDirection, indexPtr, statusPtr);

    ENCODERJNI_LOG(logDEBUG) << "Index = " << *indexPtr;
    ENCODERJNI_LOG(logDEBUG) << "Status = " << *statusPtr;
    ENCODERJNI_LOG(logDEBUG) << "ENCODER Ptr = " << *encoderPtr;
    // Advertise the true capacity (4 on 32-bit, 8 on 64-bit) instead of a
    // hard-coded 4, so Java never reads past the allocation.
    return env->NewDirectByteBuffer( encoderPtr, sizeof(void*));
}
Beispiel #2
0
/**
 * Common initialization code for Encoders.
 * This code allocates resources for Encoders and is common to all constructors.
 *
 * The counter will start counting immediately.
 *
 * @param reverseDirection If true, counts down instead of up (this is all relative)
 * @param encodingType either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If 4X is
 * selected, then an encoder FPGA object is used and the returned counts will be 4x the encoder
 * spec'd value since all rising and falling edges are counted. If 1X or 2X are selected then
 * a counter object will be used and the returned value will either exactly match the spec'd count
 * or be double (2x) the spec'd count.
 */
void Encoder::InitEncoder(bool reverseDirection, EncodingType encodingType)
{
	m_table = NULL;
	m_encodingType = encodingType;
	m_index = 0;

	if (encodingType == k4X)
	{
		// 4X decoding uses a dedicated FPGA encoder object.
		m_encodingScale = 4;
		// If either source already carries a fatal error, propagate it and bail
		// out before touching the HAL.
		if (m_aSource->StatusIsFatal())
		{
			CloneError(m_aSource);
			return;
		}
		if (m_bSource->StatusIsFatal())
		{
			CloneError(m_bSource);
			return;
		}
		int32_t status = 0;
		m_encoder = initializeEncoder(
			m_aSource->GetModuleForRouting(), m_aSource->GetChannelForRouting(),
			m_aSource->GetAnalogTriggerForRouting(),
			m_bSource->GetModuleForRouting(), m_bSource->GetChannelForRouting(),
			m_bSource->GetAnalogTriggerForRouting(),
			reverseDirection, &m_index, &status);
		wpi_setErrorWithContext(status, getHALErrorMessage(status));
		m_counter = NULL;
		SetMaxPeriod(.5);
	}
	else if (encodingType == k1X || encodingType == k2X)
	{
		// 1X/2X decoding is implemented on top of a Counter object instead.
		m_encodingScale = (encodingType == k1X) ? 1 : 2;
		m_counter = new Counter(m_encodingType, m_aSource, m_bSource, reverseDirection);
		m_index = m_counter->GetFPGAIndex();
	}
	else
	{
		wpi_setErrorWithContext(-1, "Invalid encodingType argument");
	}

	m_distancePerPulse = 1.0;
	m_pidSource = kDistance;

	HALReport(HALUsageReporting::kResourceType_Encoder, m_index, encodingType);
	LiveWindow::GetInstance()->AddSensor("Encoder", m_aSource->GetChannelForRouting(), this);
}
Beispiel #3
0
/*
 * Class:     edu_wpi_first_wpilibj_hal_EncoderJNI
 * Method:    initializeEncoder
 * Signature: (BIZBIZZLjava/nio/IntBuffer;)J
 *
 * Creates a HAL encoder from two digital sources. The FPGA index is written
 * back through the direct IntBuffer `index`; HAL errors are surfaced to Java
 * via CheckStatus. The opaque HAL handle is returned as a jlong.
 */
JNIEXPORT jlong JNICALL Java_edu_wpi_first_wpilibj_hal_EncoderJNI_initializeEncoder
  (JNIEnv * env, jclass, jbyte port_a_module, jint port_a_pin, jboolean port_a_analog_trigger, jbyte port_b_module, jint port_b_pin, jboolean port_b_analog_trigger, jboolean reverseDirection, jobject index)
{
	ENCODERJNI_LOG(logDEBUG) << "Calling ENCODERJNI initializeEncoder";
	ENCODERJNI_LOG(logDEBUG) << "Module A = " << (jint)port_a_module;
	ENCODERJNI_LOG(logDEBUG) << "Pin A = " << port_a_pin;
	ENCODERJNI_LOG(logDEBUG) << "Analog Trigger A = " << (jint)port_a_analog_trigger;
	ENCODERJNI_LOG(logDEBUG) << "Module B = " << (jint)port_b_module;
	ENCODERJNI_LOG(logDEBUG) << "Pin B = " << port_b_pin;
	ENCODERJNI_LOG(logDEBUG) << "Analog Trigger B = " << (jint)port_b_analog_trigger;
	ENCODERJNI_LOG(logDEBUG) << "Reverse direction = " << (jint)reverseDirection;
	// Resolve the direct buffer the FPGA index is reported through.
	jint * fpgaIndex = (jint*)env->GetDirectBufferAddress(index);
	ENCODERJNI_LOG(logDEBUG) << "Index Ptr = " << fpgaIndex;
	int32_t status = 0;
	void* halEncoder =
		initializeEncoder(port_a_module, port_a_pin, port_a_analog_trigger,
		                  port_b_module, port_b_pin, port_b_analog_trigger,
		                  reverseDirection, fpgaIndex, &status);

	ENCODERJNI_LOG(logDEBUG) << "Index = " << *fpgaIndex;
	ENCODERJNI_LOG(logDEBUG) << "Status = " << status;
	ENCODERJNI_LOG(logDEBUG) << "ENCODER Ptr = " << halEncoder;
	// Raise a Java exception if the HAL reported an error.
	CheckStatus(env, status);
	return (jlong)halEncoder;
}
/**
 * Sets up the full libav encoding pipeline for one HTTP video stream:
 * format context, output format, codec, stream, frame buffers, and the
 * container header, then writes the HTTP response headers and the stream
 * header to the client.
 *
 * On any libav failure a 500 stock reply is sent to the client and a
 * std::runtime_error is thrown. The libav objects allocated here are stored
 * in members; they are presumably released in the destructor — TODO confirm,
 * since the early throws below would otherwise leak partially-built state.
 *
 * @param img Reference frame (unused here beyond the signature; sizing comes
 *            from output_width_/output_height_ members).
 * @throws std::runtime_error on any allocation/lookup/open/header failure.
 */
void LibavStreamer::initialize(const cv::Mat &img)
{
  // Load format
  format_context_ = avformat_alloc_context();
  if (!format_context_)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Error allocating ffmpeg format context");
  }
  output_format_ = av_guess_format(format_name_.c_str(), NULL, NULL);
  if (!output_format_)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Error looking up output format");
  }
  format_context_->oformat = output_format_;

  // Load codec: default codec of the container unless one was requested.
  if (codec_name_.empty()) // use default codec if none specified
    codec_ = avcodec_find_encoder(output_format_->video_codec);
  else
    codec_ = avcodec_find_encoder_by_name(codec_name_.c_str());
  if (!codec_)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Error looking up codec");
  }
  video_stream_ = avformat_new_stream(format_context_, codec_);
  if (!video_stream_)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Error creating video stream");
  }
  codec_context_ = video_stream_->codec;

  // Set options
  avcodec_get_context_defaults3(codec_context_, codec_);

  codec_context_->codec_id = output_format_->video_codec;
  codec_context_->bit_rate = bitrate_;

  codec_context_->width = output_width_;
  codec_context_->height = output_height_;
  codec_context_->delay = 0;

  // Millisecond timestamps on the stream; the codec runs at 1 fps time base.
  video_stream_->time_base.num = 1;
  video_stream_->time_base.den = 1000;

  codec_context_->time_base.num = 1;
  codec_context_->time_base.den = 1;
  codec_context_->gop_size = gop_;
  codec_context_->pix_fmt = PIX_FMT_YUV420P;
  codec_context_->max_b_frames = 0; // no B-frames: keeps latency low for live streaming

  // Quality settings
  codec_context_->qmin = qmin_;
  codec_context_->qmax = qmax_;

  // Subclass hook for codec-specific options (e.g. presets).
  initializeEncoder();

  // Some formats want stream headers to be separate
  if (format_context_->oformat->flags & AVFMT_GLOBALHEADER)
    codec_context_->flags |= CODEC_FLAG_GLOBAL_HEADER;

  // Open Codec
  if (avcodec_open2(codec_context_, codec_, NULL) < 0)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Could not open video codec");
  }

  // Allocate frame buffers
  frame_ = avcodec_alloc_frame();
  tmp_picture_ = new AVPicture; // conversion scratch; presumably filled/freed elsewhere — TODO confirm
  picture_ = new AVPicture;
  int ret = avpicture_alloc(picture_, codec_context_->pix_fmt, output_width_, output_height_);
  if (ret < 0)
  {
    async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                         connection_,
                                                                                                         NULL, NULL);
    throw std::runtime_error("Could not allocate picture frame");
  }
  // Alias the AVFrame's data/linesize members onto the allocated picture.
  *((AVPicture *)frame_) = *picture_;

  // NOTE(review): this mutates the shared AVOutputFormat returned by
  // av_guess_format (a global libav object), affecting all users of that
  // format in the process — original behavior, kept as-is.
  output_format_->flags |= AVFMT_NOFILE;

  // Generate the container header into an in-memory buffer.
  std::vector<uint8_t> header_buffer;
  std::size_t header_size;
  uint8_t *header_raw_buffer;
  // define meta data
  av_dict_set(&format_context_->metadata, "author", "ROS web_video_server", 0);
  av_dict_set(&format_context_->metadata, "title", topic_.c_str(), 0);

  if (avio_open_dyn_buf(&format_context_->pb) >= 0)
  {
    if (avformat_write_header(format_context_, NULL) < 0)
    {
      async_web_server_cpp::HttpReply::stock_reply(async_web_server_cpp::HttpReply::internal_server_error)(request_,
                                                                                                           connection_,
                                                                                                           NULL, NULL);
      // BUG FIX: the original message ("Error openning dynamic buffer") both
      // misdescribed the failure — the dyn buf opened fine; it is
      // avformat_write_header that failed — and contained a typo.
      throw std::runtime_error("Error writing format header");
    }
    header_size = avio_close_dyn_buf(format_context_->pb, &header_raw_buffer);

    // copy header buffer to vector
    header_buffer.resize(header_size);
    memcpy(&header_buffer[0], header_raw_buffer, header_size);

    av_free(header_raw_buffer);
  }

  // Send response headers: chunked live stream, aggressively uncacheable.
  async_web_server_cpp::HttpReply::builder(async_web_server_cpp::HttpReply::ok).header("Connection", "close").header(
      "Server", "web_video_server").header("Cache-Control",
                                           "no-cache, no-store, must-revalidate, pre-check=0, post-check=0, max-age=0").header(
      "Pragma", "no-cache").header("Expires", "0").header("Max-Age", "0").header("Trailer", "Expires").header(
      "Content-type", content_type_).header("Access-Control-Allow-Origin", "*").write(connection_);

  // Send video stream header
  connection_->write_and_clear(header_buffer);
}