void GeneratedHeaders_Test7_Helper::__delegateVistor(odcore::data::Container &c, odcore::base::Visitor &v, bool &successfullyDelegated) {
    successfullyDelegated = false;
    if (c.getDataType() == testpackage::Test7A::ID()) {
        testpackage::Test7A payload = c.getData<testpackage::Test7A>();
        payload.accept(v);
        successfullyDelegated = true;
    }
    if (c.getDataType() == testpackage::Test7B::ID()) {
        testpackage::Test7B payload = c.getData<testpackage::Test7B>();
        payload.accept(v);
        successfullyDelegated = true;
    }
}
void GeneratedHeaders_Test2_Helper::__delegateVistor(odcore::data::Container &c, odcore::base::Visitor &v, bool &successfullyDelegated) {
    successfullyDelegated = false;
    if (c.getDataType() == Test2::ID()) {
        Test2 payload = c.getData<Test2>();
        payload.accept(v);
        successfullyDelegated = true;
    }
    if (c.getDataType() == Test2b::ID()) {
        Test2b payload = c.getData<Test2b>();
        payload.accept(v);
        successfullyDelegated = true;
    }
}
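/*
 * The two helpers above follow the same dispatch pattern: compare the container's
 * numeric type ID, extract the typed payload, and hand it to the visitor. Below is
 * a framework-free sketch of that pattern (Visitor, PayloadA and PayloadB are
 * stand-ins, not OpenDaVINCI types), using only the standard library.
 */
#include <iostream>
#include <string>

struct Visitor {
    virtual ~Visitor() = default;
    virtual void visit(const std::string &typeName) = 0;
};

struct PayloadA {
    static int ID() { return 1; }
    void accept(Visitor &v) { v.visit("PayloadA"); }
};

struct PayloadB {
    static int ID() { return 2; }
    void accept(Visitor &v) { v.visit("PayloadB"); }
};

// Mirrors __delegateVistor: match on the type ID, then let the payload accept the visitor.
void delegateVisitor(int dataType, Visitor &v, bool &successfullyDelegated) {
    successfullyDelegated = false;
    if (dataType == PayloadA::ID()) { PayloadA p; p.accept(v); successfullyDelegated = true; }
    if (dataType == PayloadB::ID()) { PayloadB p; p.accept(v); successfullyDelegated = true; }
}

struct PrintVisitor : Visitor {
    void visit(const std::string &typeName) override { std::cout << "visited " << typeName << std::endl; }
};

int main() {
    PrintVisitor v;
    bool delegated = false;
    delegateVisitor(PayloadA::ID(), v, delegated);
    std::cout << std::boolalpha << delegated << std::endl; // prints "visited PayloadA" then "true"
    return 0;
}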
void Ivrule::nextContainer(odcore::data::Container &a_container)
{
  if(!m_initialised) {
    return;
  }
  if(a_container.getDataType() == opendlv::perception::Environment::ID()) {
    opendlv::perception::Environment message = a_container.getData<opendlv::perception::Environment>();
    ReadEnvironment(message);
  } else  if (a_container.getDataType() == opendlv::proxy::reverefh16::VehicleSpeed::ID()) {
    auto propulsion = a_container.getData<opendlv::proxy::reverefh16::VehicleSpeed>();
    double speedKph = propulsion.getPropulsionShaftVehicleSpeed();
    m_speed = static_cast<float>(speedKph / 3.6);
  }
}
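/*
 * A note on the conversion above: 1 km/h = 1000 m / 3600 s, so dividing a speed in
 * km/h by 3.6 yields m/s. Minimal, self-contained illustration of that arithmetic:
 */
#include <iostream>

int main() {
    const double speedKph = 90.0;
    const double speedMps = speedKph / 3.6; // 90 km/h = 25 m/s
    std::cout << speedMps << std::endl;
    return 0;
}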
void Gcdc2016::nextContainer(odcore::data::Container &a_c)
{
  if(a_c.getDataType() == opendlv::proxy::ActuationRequest::ID()){
    auto actuationRequest = a_c.getData<opendlv::proxy::ActuationRequest>();

    if (actuationRequest.getIsValid())
    {
      double braking = 0.0;
      double acceleration = 0.0;
      double roadWheelAngle = actuationRequest.getSteering();
      if (actuationRequest.getAcceleration() > 0)
      {
        acceleration = actuationRequest.getAcceleration();
      }
      else
      {
        braking = actuationRequest.getAcceleration();
      }
      m_vehicle->SetRoadVheelAngle(roadWheelAngle+0.1);
      m_vehicle->SetDeceleraionRequest(braking);
      m_vehicle->SetThrottlePedalPosition(acceleration+1);
    }
  }
}
void ProxyLEDStrip::nextContainer(odcore::data::Container &c) {
    if (c.getDataType() == opendlv::logic::action::AimPoint::ID()) {
        auto aimPoint = c.getData<opendlv::logic::action::AimPoint>();
        m_angle = aimPoint.getAzimuthAngle();
        cout << "[" << getName() << "] Direction azimuth: " << m_angle << std::endl;
    }
}
/**
 * Receives raw vehicle messages from the proxy.
 * Sends raw vehicle messages to the linguistics handlers.
 */
void Audition::nextContainer(odcore::data::Container &c)
{
  // cout << "Received container of type " << c.getDataType() 
  //     << " sent at " << c.getSentTimeStamp().getYYYYMMDD_HHMMSSms() 
  //     <<" received at " << c.getReceivedTimeStamp().getYYYYMMDD_HHMMSSms()
  //     << endl;
  if(c.getDataType() == opendlv::proxy::V2vInbound::ID()){
    // std::cout << "Received a message of type ."
    opendlv::proxy::V2vInbound message = c.getData<opendlv::proxy::V2vInbound>();
    std::string dataString = message.getData();
    std::vector<unsigned char> data(dataString.begin(), dataString.end());
    
    // string value = message.getValue();
    // vector<string> tokens = odcore::strings::StringToolbox::split(value, '|');


    // std::cout << std::to_string(static_cast<unsigned char>(*data.begin())) 
    //     << std::endl;
    // std::vector<unsigned char> const bytes = data;
    // std::stringstream ss;
    // for (uint i = 0; i < bytes.size(); i++) {
    //     ss << std::to_string(bytes.at(i));
    //     ss << "|";
    // }
    // std::cout<<ss.str()<<std::endl;
    // std::cout<<value<<std::endl;

    unsigned char v2vMsgId = data.at(0);
    // std::cout <<  std::to_string(v2vMsgId)<<std::endl;
    std::string v2vMsgType;
    switch(v2vMsgId)
    {
      case 1:
        v2vMsgType = "denm";
      break;
      case 2:
        v2vMsgType = "cam";
      break;
      case 10:
        v2vMsgType = "iclcm";
      break;
      default:
        std::cout << "Received invalid message ID.";
    }
    if(!v2vMsgType.empty())
    {
      // std::cout<<"Sorted and sending to next layer.\n";
      opendlv::sensation::Voice nextMessage(v2vMsgType, message.getSize(), dataString);
      odcore::data::Container container(nextMessage);
      getConference().send(container);
    }

  }
}
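/*
 * The switch above maps a one-byte V2V message ID to a type string (1 = denm,
 * 2 = cam, 10 = iclcm). A table-driven sketch of the same mapping, using only the
 * standard library (function and table names are illustrative, not from the source):
 */
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

std::string v2vMessageTypeFromId(uint8_t id) {
    static const std::map<uint8_t, std::string> kTypesById{
        {1, "denm"}, {2, "cam"}, {10, "iclcm"}};
    const auto it = kTypesById.find(id);
    return (it != kTypesById.end()) ? it->second : std::string(); // empty string = unknown ID
}

int main() {
    std::cout << v2vMessageTypeFromId(2) << std::endl;                            // "cam"
    std::cout << std::boolalpha << v2vMessageTypeFromId(42).empty() << std::endl; // true
    return 0;
}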
    void DataSpy::nextContainer(odcore::data::Container &c) {
        if (c.getDataType() == opendlv::proxy::reverefh16::Propulsion::ID()) {
            opendlv::proxy::reverefh16::Propulsion data = c.getData<opendlv::proxy::reverefh16::Propulsion>();
            cout << "[dataspy] " << data.LongName() << ": " << data.toString() << endl;
            return;
        }
        if (c.getDataType() == opendlv::proxy::reverefh16::Steering::ID()) {
            opendlv::proxy::reverefh16::Steering data = c.getData<opendlv::proxy::reverefh16::Steering>();
            cout << "[dataspy] " << data.LongName() << ": " << data.toString() << endl;
            return;
        }
        if (c.getDataType() == opendlv::proxy::GpsReading::ID()) {
            opendlv::proxy::GpsReading data = c.getData<opendlv::proxy::GpsReading>();
            cout << "[dataspy] " << data.LongName() << ": " << data.toString() << endl;
            return;
        }


        cout << "[dataspy] Received container of type " << c.getDataType() << endl;
    }
void CanMessageDataStore::add(odcore::data::Container &container)
{
  using namespace automotive;
  if (container.getDataType() == GenericCANMessage::ID()) {
    // TODO: Use CAN mapping to transform a high-level message to a low-level
    // message.

    //    GenericCANMessage gcm = const_cast<odcore::data::Container
    //    &>(container)
    //                            .getData<GenericCANMessage>();
    //    m_canDevice->write(gcm);
  }
}
void ProxyV2V::nextContainer(odcore::data::Container &c) {
    if (!m_initialised) {
        return;
    }
    if (c.getDataType() == opendlv::proxy::V2vRequest::ID()) {
        if (m_debug) {
            cout << "[" << getName() << "] Got an outbound message" << endl;
        }

        opendlv::proxy::V2vRequest message = c.getData< opendlv::proxy::V2vRequest >();
        try {
            m_udpsender->send(message.getData());
        }
        catch (string &exception) {
            cerr << "[" << getName() << "] Data could not be sent: " << exception << endl;
        }
    }
}
void Relay::nextContainer(odcore::data::Container &a_container)
{
  if (a_container.getDataType() == opendlv::proxy::RelayRequest::ID()) {
    opendlv::proxy::RelayRequest request = 
        a_container.getData<opendlv::proxy::RelayRequest>();

    std::string deviceId = request.getDeviceId();

    if (deviceId != getIdentifier()) {
      return;
    }

    bool relayValue = request.getRelayValue();
    uint8_t relayIndex = request.getRelayIndex();

    m_device->SetValue(relayIndex, relayValue);
  }
}
void CheckActuation::nextContainer(odcore::data::Container &a_container)
{
  if (a_container.getDataType() == opendlv::proxy::ActuationRequest::ID()+300){
    opendlv::proxy::ActuationRequest actuationRequest 
        = a_container.getData<opendlv::proxy::ActuationRequest>();
    float acceleration = actuationRequest.getAcceleration();
    float steering = actuationRequest.getSteering();

    // clamp steering
    if (steering < -m_steeringLimit) {
      steering = -m_steeringLimit;
      std::cout << "steering request was capped to " << steering << std::endl;
    }
    else if (steering > m_steeringLimit) {
      steering = m_steeringLimit;
      std::cout << "steering request was capped to " << steering << std::endl;
    }

    // clamp acceleration
    if (acceleration < -m_maxAllowedDeceleration) {
      acceleration = -m_maxAllowedDeceleration;
      std::cout << "acceleration request was capped to " << acceleration << std::endl;
    }
    else if (acceleration > m_accMaxLimit) {
      acceleration = m_accMaxLimit;
      std::cout << "acceleration request was capped to " << acceleration << std::endl;
    }

    actuationRequest.setAcceleration(acceleration);
    actuationRequest.setSteering(steering);

    actuationRequest.setIsValid(true);
    
    odcore::data::Container c(actuationRequest);
    getConference().send(c);
  }
}
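/*
 * The two if/else-if ladders above clamp steering and acceleration to configured
 * limits. With C++17 the same clamping can be written with std::clamp; the limit
 * values below are placeholders, not the module's configuration:
 */
#include <algorithm>
#include <iostream>

int main() {
    const float steeringLimit = 0.5f;    // placeholder for m_steeringLimit
    const float maxDeceleration = 4.0f;  // placeholder for m_maxAllowedDeceleration
    const float maxAcceleration = 2.0f;  // placeholder for m_accMaxLimit

    float steering = -0.8f;
    float acceleration = 3.1f;

    steering = std::clamp(steering, -steeringLimit, steeringLimit);
    acceleration = std::clamp(acceleration, -maxDeceleration, maxAcceleration);

    std::cout << steering << " " << acceleration << std::endl; // -0.5 2
    return 0;
}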
/**
 * Receives raw images from cameras.
 * Sends an RGB matrix of optical flow.
 */
void OpticalFlow::nextContainer(odcore::data::Container &a_c)
{
  if(a_c.getDataType() != odcore::data::image::SharedImage::ID()){
    return;
  }
  odcore::data::image::SharedImage mySharedImg =
      a_c.getData<odcore::data::image::SharedImage>();
  // std::cout<<mySharedImg.getName()<<std::endl;

  std::shared_ptr<odcore::wrapper::SharedMemory> sharedMem(
      odcore::wrapper::SharedMemoryFactory::attachToSharedMemory(
          mySharedImg.getName()));
  const uint32_t nrChannels = mySharedImg.getBytesPerPixel();
  const uint32_t imgWidth = mySharedImg.getWidth();
  const uint32_t imgHeight = mySharedImg.getHeight();

  // Validate the shared memory first so an early return cannot leak myIplImage.
  if(!sharedMem->isValid()){
    return;
  }

  IplImage* myIplImage = cvCreateImage(cvSize(imgWidth,imgHeight), IPL_DEPTH_8U,
      nrChannels);
  m_image = cv::Mat(myIplImage);

  sharedMem->lock();
  {
    memcpy(m_image.data, sharedMem->getSharedMemory(),
        imgWidth*imgHeight*nrChannels);
  }
  sharedMem->unlock();



  //Converts the image to grayscale and output in "gray"
  cv::cvtColor(m_image, m_grayImage, cv::COLOR_BGR2GRAY);

  //First initialisation of the image
  if(m_prevGrayImage.empty()){
    m_grayImage.copyTo(m_prevGrayImage);
    double x, y;
    for(uint32_t i = 1; i <= m_nAxisPoints; ++i){
      for(uint32_t j = 1; j <= m_nAxisPoints; ++j){
        x = imgWidth*j/(1+m_nAxisPoints);
        y = imgHeight*i/(1+m_nAxisPoints);
        m_staticImagePoints.push_back(cv::Point2f(x,y));
        // constPoints->row(k) << x,y;
      }
    }
  }


  std::vector<uchar> status;
  std::vector<float> err;

  cv::calcOpticalFlowPyrLK(m_prevGrayImage, m_grayImage, m_staticImagePoints,
      m_endImagePoints, status, err, m_searchSize, m_maxLevel, m_termcrit,
      0, m_minEigThreshold);

  for(uint32_t i = 0; i < m_staticImagePoints.size(); i++){
    cv::circle(m_image, m_staticImagePoints[i], 3, cv::Scalar(255,0,0), -1, 8);            
  }
  for(uint32_t i = 0; i < m_endImagePoints.size(); i++){
    cv::circle(m_image, m_endImagePoints[i], 3, cv::Scalar(0,0,255), -1, 8);            
  }
  SendContainer();
  // updateFlow();
  // std::cout<< m_flow << std::endl;

  // if(m_sharedMemory.get() && m_sharedMemory->isValid()){
  //   {
  //     std::cout<< m_size << ", " << m_flow.total() * m_flow.elemSize() << std::endl;
  //     odcore::base::Lock l(m_sharedMemory);
  //     memcpy(static_cast<char *>(m_sharedMemory->getSharedMemory()),m_flow.data,
  //         m_size);
  //   }
  //   odcore::data::Container c(m_outputSharedImage);
  //   getConference().send(c);
  //   // std::cout << "Sent" << std::endl;
  // 
  // }

  // std::cout<< m_outputSharedImage.getName() << std::endl;


  cv::swap(m_prevGrayImage, m_grayImage);
  
  const int32_t windowWidth = 640;
  const int32_t windowHeight = 480;
  cv::Mat display1;
  cv::resize(m_image, display1, cv::Size(windowWidth, windowHeight), 0, 0,
      cv::INTER_CUBIC);
  cv::imshow("Optical Flow", display1); 
  cv::waitKey(1);

  cvReleaseImage(&myIplImage);
}
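/*
 * The seed points above are laid out on an evenly spaced grid: for n axis points,
 * the j-th column sits at width * j / (n + 1) and the i-th row at height * i / (n + 1),
 * which keeps a margin of one grid step from every image border. A standalone sketch
 * of that layout (no OpenCV required; makeSeedGrid is an illustrative helper name):
 */
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

std::vector<std::pair<float, float>> makeSeedGrid(uint32_t width, uint32_t height, uint32_t nAxisPoints) {
    std::vector<std::pair<float, float>> points;
    for (uint32_t i = 1; i <= nAxisPoints; ++i) {
        for (uint32_t j = 1; j <= nAxisPoints; ++j) {
            const float x = static_cast<float>(width) * j / (1 + nAxisPoints);
            const float y = static_cast<float>(height) * i / (1 + nAxisPoints);
            points.emplace_back(x, y);
        }
    }
    return points;
}

int main() {
    // A 3x3 grid on a 640x480 image: x in {160, 320, 480}, y in {120, 240, 360}.
    for (const auto &p : makeSeedGrid(640, 480, 3)) {
        std::cout << p.first << "," << p.second << " ";
    }
    std::cout << std::endl;
    return 0;
}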
 void SuperComponent::handleUnkownContainer(const odcore::data::dmcp::ModuleDescriptor& md, const odcore::data::Container& container) {
     clog << "(context::base::SuperComponent) Received unknown container " << container.toString() << "from " << md.toString() << endl;
 }
 void MessageToCANDataStore::add(odcore::data::Container &container) {
     if (container.getDataType() == automotive::GenericCANMessage::ID()) {
         GenericCANMessage gcm = container.getData<GenericCANMessage>();
         m_canDevice->write(gcm);
     }
 }
/**
 * Receives images as well as the optical flow to calculate the focus of expansion.
 */
void DirectionOfMovement::nextContainer(odcore::data::Container &a_c)
{

  if(a_c.getDataType() == odcore::data::image::SharedImage::ID()){
    odcore::data::image::SharedImage mySharedImg =
        a_c.getData<odcore::data::image::SharedImage>();
    // std::cout<<mySharedImg.getName()<<std::endl;

    std::shared_ptr<odcore::wrapper::SharedMemory> sharedMem(
        odcore::wrapper::SharedMemoryFactory::attachToSharedMemory(
            mySharedImg.getName()));
    const uint32_t nrChannels = mySharedImg.getBytesPerPixel();
    const uint32_t imgWidth = mySharedImg.getWidth();
    const uint32_t imgHeight = mySharedImg.getHeight();

    // Validate the shared memory first so an early return cannot leak myIplImage.
    if(!sharedMem->isValid()){
      return;
    }

    IplImage* myIplImage = cvCreateImage(cvSize(imgWidth,imgHeight), IPL_DEPTH_8U,
        nrChannels);
    cv::Mat tmpImage = cv::Mat(myIplImage);

    sharedMem->lock();
    {
      memcpy(tmpImage.data, sharedMem->getSharedMemory(),
          imgWidth*imgHeight*nrChannels);
    }
    sharedMem->unlock();
    m_image.release();
    m_image = tmpImage.clone();
    cvReleaseImage(&myIplImage);
    
    return;
  }
  if(a_c.getDataType() == opendlv::sensation::OpticalFlow::ID()){
    opendlv::sensation::OpticalFlow message = 
        a_c.getData<opendlv::sensation::OpticalFlow>();

    uint16_t nPoints = message.getNumberOfPoints();
    std::vector<opendlv::model::Direction> directions = 
        message.getListOfDirections();
    std::vector<float> u = message.getListOfU();
    std::vector<float> v = message.getListOfV();

    Eigen::MatrixXd flow(nPoints, 4);
    Eigen::MatrixXd A(nPoints,2);
    Eigen::MatrixXd B(nPoints,1);
    for(uint16_t i = 0; i < nPoints; ++i){
      flow.row(i) << directions[i].getAzimuth(), directions[i].getZenith(), 
          u[i], v[i];
    }
    A.col(0) = flow.col(3);
    A.col(1) = -flow.col(2);
    B.col(0) = flow.col(3).cwiseProduct(flow.col(0))-flow.col(1).cwiseProduct(flow.col(2));
    
    // Least-squares focus of expansion via the normal equations: FOE = (A^T * A)^-1 * A^T * B
    Eigen::VectorXd FoeMomentum = ((A.transpose()*A).inverse() * A.transpose() * B) - m_foePix;
    if(FoeMomentum.allFinite()){
      if(FoeMomentum.norm() > 10){
        m_foePix = m_foePix + FoeMomentum/FoeMomentum.norm()*10;
      }
      else{
        m_foePix = m_foePix + FoeMomentum/20;
      }
    }

    // SendContainer();
    
    std::cout<< m_foePix.transpose() << std::endl;
    cv::circle(m_image, cv::Point2f(m_foePix(0),m_foePix(1)), 3, cv::Scalar(0,0,255), -1, 8);
    const int32_t windowWidth = 640;
    const int32_t windowHeight = 480;
    cv::Mat display;
    cv::resize(m_image, display, cv::Size(windowWidth, windowHeight), 0, 0,
      cv::INTER_CUBIC);
    cv::imshow("FOE", display);

    cv::waitKey(1);
    return;
  }
  
}
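/*
 * The focus-of-expansion estimate above is an ordinary least-squares solve of
 * A * x = B via the normal equations, x = (A^T * A)^-1 * A^T * B. A self-contained
 * Eigen sketch on synthetic data; the QR-based solve shown alongside is a common,
 * numerically more robust alternative to forming the normal equations explicitly:
 */
#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::MatrixXd A(4, 2);
    Eigen::VectorXd B(4);
    A << 1, 0,
         0, 1,
         1, 1,
         2, 1;
    B << 1.0, 2.0, 3.1, 3.9;

    // Normal equations, as in the code above.
    Eigen::VectorXd xNormal = (A.transpose() * A).inverse() * A.transpose() * B;
    // Equivalent least-squares solve via column-pivoted QR.
    Eigen::VectorXd xQr = A.colPivHouseholderQr().solve(B);

    std::cout << xNormal.transpose() << std::endl;
    std::cout << xQr.transpose() << std::endl; // matches the line above (up to rounding)
    return 0;
}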