Example #1
0
/*
 * QoS implementation using 8 queues for each priority and round-robin algorithm.
 */
/*
 * QoS implementation using 8 queues for each priority and round-robin algorithm.
 *
 * Weighted round-robin scheduler: priority level i (7 = highest) may dequeue
 * up to i+1 frames per pass, so higher-priority queues receive a
 * proportionally larger share of the switching bandwidth.  Runs forever.
 */
void Switch::switchFrames()
{
	while (1)
	{
		for (int i = 7; i >= 0; --i)
		{
			// Serve at most i+1 frames from queue i before moving to the next level.
			for (int j = 0; j <= i; ++j)
			{
				r.lock();
				if (queues[i].empty())
				{
					r.unlock();
					break; // queue drained -- next priority level
				}

				Frame* frame = queues[i].front();
				queues[i].pop();

				// Keep the critical section minimal: the frame is already
				// popped, so tracing and processing can run unlocked instead
				// of blocking other threads on console I/O.
				r.unlock();

				if (prt)
					std::cout << frame->toString() << std::endl;

				processFrame(frame, frame->sourcePort);
			}
		}
	}
}
Example #2
0
/*
 * Receive loop: performs the netlink handshake, then forever reads raw
 * messages from the kernel, wraps each payload in a Frame and hands it to
 * the switch for processing.  Never returns under normal operation.
 */
int Listener::listen()
{
	fillUp();

	Switch sl;

	NetlinkCom com;
	com.handshake();

	while(1)
	{
		unsigned char data[2048];
		com.receive(data);

		// Reinterpret the raw buffer as the incoming kernel message.
		struct KernelComMsg* message = (struct KernelComMsg*) data;

		// Port the message arrived from.
		std::cout << "From port: " << message->control.port << std::endl;

		// Copy the frame bytes from the message into a vector.
		// TODO: could this be done faster (avoid the copy)?
		std::vector<unsigned char> v;
		v.assign(message->frame, message->frame + message->control.len);

		// Frame object.  NOTE(review): ownership appears to pass to
		// processFrame(); if the callee does not delete it, this leaks
		// one Frame per received message -- confirm against Switch.
		Frame* frame = Frame::createFrame(v);
		std::cout << "Frame:" << std::endl << frame->toString() << std::endl;

		sl.processFrame(frame, message->control.port);
	}

	return 0;
}
Example #3
0
int Dispatcher::send(const Frame& frame, MonotonicTime tx_deadline, MonotonicTime blocking_deadline,
                     CanTxQueue::Qos qos, CanIOFlags flags, uint8_t iface_mask)
{
    if (frame.getSrcNodeID() != getNodeID())
    {
        UAVCAN_ASSERT(0);
        return -ErrLogic;
    }

    CanFrame can_frame;
    if (!frame.compile(can_frame))
    {
        UAVCAN_TRACE("Dispatcher", "Unable to send: frame is malformed: %s", frame.toString().c_str());
        UAVCAN_ASSERT(0);
        return -ErrLogic;
    }
    return canio_.send(can_frame, tx_deadline, blocking_deadline, iface_mask, qos, flags);
}
Example #4
0
// Verifies the human-readable toString() rendering of RxFrame and Frame,
// both for default-constructed objects and for fully-populated ones.
TEST(Frame, FrameToString)
{
    using uavcan::Frame;
    using uavcan::RxFrame;

    // RX frame default
    RxFrame rx_frame;
    EXPECT_EQ("prio=255 dtid=65535 tt=3 snid=255 dnid=255 sot=0 eot=0 togl=0 tid=0 payload=[] ts_m=0.000000 ts_utc=0.000000 iface=0",
              rx_frame.toString());

    // RX frame max len
    rx_frame = RxFrame(Frame(uavcan::DataTypeID::MaxPossibleDataTypeIDValue, uavcan::TransferTypeMessageBroadcast,
                             uavcan::NodeID::Max, 0, uavcan::TransferID::Max),
                       uavcan::MonotonicTime::getMax(), uavcan::UtcTime::getMax(), 3);

    // Payload bytes 00..07 are offered; the rendered payload shows only
    // 00..06, presumably because setPayload() truncates to the frame's
    // maximum payload length -- TODO confirm against Frame::setPayload().
    uint8_t data[8];
    for (unsigned i = 0; i < sizeof(data); i++)
    {
        data[i] = uint8_t(i);
    }
    rx_frame.setPayload(data, sizeof(data));

    rx_frame.setStartOfTransfer(true);
    rx_frame.setEndOfTransfer(true);
    rx_frame.flipToggle();
    rx_frame.setPriority(uavcan::TransferPriority::NumericallyMax);

    // All the setters above must be reflected in the string form.
    EXPECT_EQ("prio=31 dtid=65535 tt=2 snid=127 dnid=0 sot=1 eot=1 togl=1 tid=31 payload=[00 01 02 03 04 05 06] "
              "ts_m=18446744073709.551615 ts_utc=18446744073709.551615 iface=3",
              rx_frame.toString());

    // Plain frame default
    Frame frame;
    EXPECT_EQ("prio=255 dtid=65535 tt=3 snid=255 dnid=255 sot=0 eot=0 togl=0 tid=0 payload=[]", frame.toString());

    // Plain frame max len: slicing an RxFrame into a Frame drops the
    // RX-only fields (timestamps, iface index) from the rendering.
    frame = rx_frame;
    EXPECT_EQ("prio=31 dtid=65535 tt=2 snid=127 dnid=0 sot=1 eot=1 togl=1 tid=31 payload=[00 01 02 03 04 05 06]",
              frame.toString());
}
void printFrame(const Frame & f)  {
	cout << endl <<  "Current frame: " << f.toString(true) << endl;
}
int main(int argc, char *argv[]){
    
	// Option Parser
        Options opt = setupOptions(argc,argv);

	// Read PDB
	System sys;
	sys.readPdb(opt.pdb);

	int centerResidueIndex = -1;
	for (uint i = 0; i < sys.positionSize();i++){
		if ((sys.getPosition(i).getResidueNumber() == opt.resnum) && (sys.getPosition(i).getChainId() == opt.chain)) {
			centerResidueIndex = i;
			break;
		}
	}
	if (centerResidueIndex == -1){
		cerr << "ERROR Couldn't find central residue"<<endl;
		cerr << opt.pdb << endl;
		cerr << opt.resnum << "\t" << opt.chain << endl;
		exit(3);
	}
	cout << "Compute Frame.h"<<endl;
 	Frame f;
	if (!f.computeFrameFromFunctionalGroup(sys.getPosition(centerResidueIndex).getCurrentIdentity())){
			cerr << "Problem creating frame from central residue"<<endl;
			cerr << opt.pdb << endl;
			cerr << opt.resnum << "\t" << opt.chain << endl;
			exit(324);
	}

	if (opt.printFrames){
		cout << "Write out basic frame"<<endl;
		ofstream fout;
		fout.open("basicFrame.py");
		fout << f.toString()<<endl;
		fout.close();
	}


	// Align frame and atoms of sys to origin.
	AtomPointerVector &av = sys.getAtomPointers();
	f.transformToGlobalBasis(av);


	Transforms t;
	for (uint i = 0; i < sys.positionSize();i++){
		if (i == centerResidueIndex) continue;
		Residue &r  = sys.getResidue(i);
		CartesianPoint cp;

		double angleBetweenFrames = MslTools::doubleMax;
		if (r.getResidueName() == "ASP" &&
			    r.atomExists("CG") &&
			    r.atomExists("OD1") &&
			    r.atomExists("OD2")){
				
				cp.setCoor( (r("CG").getX()+r("OD1").getX()+r("OD2").getX()) / 3, 
					    (r("CG").getY()+r("OD1").getY()+r("OD2").getY()) / 3, 
					    (r("CG").getZ()+r("OD1").getZ()+r("OD2").getZ()) / 3);


				if (i != centerResidueIndex) {

					f.transformFromGlobalBasis(r.getAtomPointers());

					Frame floatingFrame;
					floatingFrame.computeFrameFrom3Atoms(r("OD1"),r("CG"),r("OD2"));

					if (opt.printFrames){

						
						char name[80];
						sprintf(name,"aspFrame%1s%03d.py",r.getChainId().c_str(),r.getResidueNumber());

						ofstream fout;
						fout.open(name);
						fout << floatingFrame.toString()<<endl;
						fout.close();
					}



					//cout << floatingFrame.toString()<<endl;

					Matrix m = f.anglesBetweenFrame(floatingFrame);
					angleBetweenFrames = m[2][2];// z vs z

					f.transformToGlobalBasis(r.getAtomPointers());

				}
				
		}

		if (r.getResidueName() == "ASN" &&
			    r.atomExists("CG") &&
			    r.atomExists("OD1") &&
			    r.atomExists("ND2")){
				
				cp.setCoor( (r("CG").getX()+r("OD1").getX()+r("ND2").getX()) / 3, 
					    (r("CG").getY()+r("OD1").getY()+r("ND2").getY()) / 3, 
					    (r("CG").getZ()+r("OD1").getZ()+r("ND2").getZ()) / 3);



				if (i != centerResidueIndex) {
					f.transformFromGlobalBasis(r.getAtomPointers());
					Frame floatingFrame;
					floatingFrame.computeFrameFrom3Atoms(r("OD1"),r("CG"),r("ND2"));

					if (opt.printFrames){
						char name[80];
						sprintf(name,"asnFrame%1s%03d.py",r.getChainId().c_str(),r.getResidueNumber());

						ofstream fout;
						fout.open(name);
						fout << floatingFrame.toString()<<endl;
						fout.close();
					}

					Matrix m = f.anglesBetweenFrame(floatingFrame);
					angleBetweenFrames = m[2][2];// z vs z
					f.transformToGlobalBasis(r.getAtomPointers());
				}
				
		}


		if (r.getResidueName() == "GLU" &&
			    r.atomExists("CD") &&
			    r.atomExists("OE1") &&
			    r.atomExists("OE2")){
				
				cp.setCoor( (r("CD").getX()+r("OE1").getX()+r("OE2").getX()) / 3, 
					    (r("CD").getY()+r("OE1").getY()+r("OE2").getY()) / 3, 
					    (r("CD").getZ()+r("OE1").getZ()+r("OE2").getZ()) / 3);



				if (i != centerResidueIndex) {
					f.transformFromGlobalBasis(r.getAtomPointers());
					Frame floatingFrame;
					floatingFrame.computeFrameFrom3Atoms(r("OE1"),r("CD"),r("OE2"));

					if (opt.printFrames){
						char name[80];
						sprintf(name,"gluFrame%1s%03d.py",r.getChainId().c_str(),r.getResidueNumber());

						ofstream fout;
						fout.open(name);
						fout << floatingFrame.toString()<<endl;
						fout.close();
					}

					Matrix m = f.anglesBetweenFrame(floatingFrame);
					angleBetweenFrames = m[2][2];// z vs z
					f.transformToGlobalBasis(r.getAtomPointers());
				}
				
		}

		if (r.getResidueName() == "GLN" &&
			    r.atomExists("CD") &&
			    r.atomExists("OE1") &&
			    r.atomExists("NE2")){
				
				cp.setCoor( (r("CD").getX()+r("OE1").getX()+r("NE2").getX()) / 3, 
					    (r("CD").getY()+r("OE1").getY()+r("NE2").getY()) / 3, 
					    (r("CD").getZ()+r("OE1").getZ()+r("NE2").getZ()) / 3);


				if (i != centerResidueIndex) {
					f.transformFromGlobalBasis(r.getAtomPointers());
					Frame floatingFrame;
					floatingFrame.computeFrameFrom3Atoms(r("OE1"),r("CD"),r("NE2"));
					if (opt.printFrames){
						char name[80];
						sprintf(name,"glnFrame%1s%03d.py",r.getChainId().c_str(),r.getResidueNumber());

						ofstream fout;
						fout.open(name);
						fout << floatingFrame.toString()<<endl;
						fout.close();
					}

					Matrix m = f.anglesBetweenFrame(floatingFrame);
					angleBetweenFrames = m[2][2];// z vs z
					f.transformToGlobalBasis(r.getAtomPointers());
				}

				
		}

		
		SphericalPoint spRes = t.transform(cp);
		if (angleBetweenFrames == MslTools::doubleMax){
			angleBetweenFrames = 0;
		} else {
			if (angleBetweenFrames > 90){
				angleBetweenFrames = 180 - angleBetweenFrames;
			}

		}

 		fprintf(stdout, "RES %10s %1s %04d %3s %8.3f %8.3f %8.3f %8.3f\n",MslTools::getFileName(opt.pdb).c_str(),r.getChainId().c_str(),r.getResidueNumber(),r.getResidueName().c_str(),spRes.getRadius(), spRes.getSigma(),spRes.getTheta(),angleBetweenFrames*M_PI/180);


		AtomPointerVector &ats = r.getAtomPointers();
		for (uint j = 0; j < ats.size();j++){
			SphericalPoint sp = t.transform(ats(j).getCoor());
			
			//fprintf(stdout, "ATM %10s %1s %04d %3s %4s %8.3f %8.3f %8.3f\n",MslTools::getFileName(opt.pdb).c_str(),ats(j).getChainId().c_str(),ats(j).getResidueNumber(),r.getResidueName().c_str(),ats(j).getName().c_str(),sp.getRadius(), sp.getSigma(),sp.getTheta());
			Residue &cent = sys.getPosition(centerResidueIndex).getCurrentIdentity();
 			if (cent.getResidueName() == "LYS"){
	 			double lysDihedral = 0;
 				double lysAngle    = 0;
 				lysDihedral = cent("CD").getCoor().dihedral(cent("CE").getCoor(),cent("NZ").getCoor(),ats(j).getCoor());
 				lysAngle    = cent("CE").getCoor().angle(cent("NZ").getCoor(),ats(j).getCoor());
				fprintf(stdout, "ATM %10s %04d %1s %04d %3s %4s %8.3f %8.3f %8.3f %8.3f %8.3f\n",MslTools::getFileName(opt.pdb).c_str(),opt.resnum,ats(j).getChainId().c_str(),ats(j).getResidueNumber(),r.getResidueName().c_str(),ats(j).getName().c_str(),sp.getRadius(), sp.getSigma(),sp.getTheta(),lysAngle,lysDihedral);
 			}
			else {
				// JEDONALD WAY..
				fprintf(stdout, "ATM %10s %04d %1s %04d %3s %4s %8.3f %8.3f %8.3f %8.3f\n",MslTools::getFileName(opt.pdb).c_str(),opt.resnum,ats(j).getChainId().c_str(),ats(j).getResidueNumber(),r.getResidueName().c_str(),ats(j).getName().c_str(),sp.getRadius(), sp.getSigma(),sp.getTheta(), angleBetweenFrames*M_PI/180);
			}
		}

		
	}
	
		
	cout << "Done."<<endl;
}
/**
 * Decodes one audio packet into a newly allocated, caller-owned Frame.
 *
 * Maintains the running presentation timestamp across calls via
 * _last_pts/_next_pts, computes the frame duration from the sample count and
 * codec parameters, and pushes a copy of the decoded frame via pushFrame().
 *
 * @param packet  Compressed audio packet to decode.
 * @return New Frame (finished if samples were produced); on decoder error a
 *         blank, unfinished Frame is returned instead.
 */
Frame * Decoder::decodeAudio2(Packet & packet) {
  LOGTRACEMETHOD("Decode Audio");
  LOGDEBUG(packet.toString());
  Frame * frame = new Frame();
  int size = packet.packet->size;
  int samples_size = 192000;//AVCODEC_MAX_AUDIO_FRAME_SIZE;
  int bps = av_get_bytes_per_sample(ctx->sample_fmt);
  // NOTE(review): avcodec_decode_audio4()'s third argument is documented by
  // FFmpeg as a got_frame flag, not a byte count, yet samples_size is used
  // as a size in the duration math below -- confirm against the FFmpeg
  // version this builds with.
  int len = avcodec_decode_audio4(ctx, frame->getAVFrame(), &samples_size, packet.packet);

  //@TODO: this is a hack, because the decoder changes the TimeBase after the first packet was decoded
  if (_next_pts == AV_NOPTS_VALUE) {
#ifdef USE_TIME_BASE_Q
    _next_pts = av_rescale_q(packet.getPts(), packet.getTimeBase(), AV_TIME_BASE_Q);
#else
    _next_pts = av_rescale_q(packet.getPts(), packet.getTimeBase(), ctx->time_base);
#endif

    LOGDEBUG("setting last pts to " << _next_pts << " ctxtb=" << ctx->time_base.num << "/" << ctx->time_base.den
    << " ptb=" << packet.getTimeBase().num << "/" << packet.getTimeBase().den);
  }
  LOGDEBUG("DecodingLength:" << len << " PacketSize:" << packet.getSize() << "SampleSize:" << samples_size << "FrameSize:" << ctx->frame_size * ctx->channels);
  if (len < 0 /*||ctx->channels<=0||samples_size<=0*/) {
    LOGERROR("Error while decoding audio Frame");
    delete frame;        // fix: the partially initialized frame leaked here
    return new Frame();  // hand back a blank, unfinished frame
  }
  // A positive sample count means the decoder actually produced audio.
  if (samples_size > 0) {
    frame->setFinished(true);
  } else {
    frame->setFinished(false);
  }
  size -= len;  // NOTE(review): size is never read after this -- dead store?

  frame->_allocated = true;
  frame->stream_index = packet.packet->stream_index;

  // Presentation timestamp carried over from the previous call.
  frame->setPts(_next_pts);
  AVRational ar;
  ar.num = 1;
  ar.den = ctx->sample_rate;
#ifdef USE_TIME_BASE_Q
  int64_t dur = av_rescale_q(samples_size, ar, AV_TIME_BASE_Q);
  frame->duration = dur;
  frame->setTimeBase(AV_TIME_BASE_Q);
#else
  // Duration in AV_TIME_BASE units derived from the decoded byte count,
  // bytes-per-sample, channel count and sample rate; then rescaled into
  // the per-sample time base ar (1/sample_rate).
  int64_t dur = ((int64_t) AV_TIME_BASE / bps * samples_size) / (ctx->sample_rate * ctx->channels);
  AVRational arbase;
  arbase.num = 1;
  arbase.den = AV_TIME_BASE;

  frame->duration = av_rescale_q(dur, arbase, ar);
  frame->setTimeBase(ar);
#endif
  // Advance the running timestamps for the next call.
  _last_pts = _next_pts;
  _next_pts += frame->duration;
  frame->pos = packet.packet->pos;
  frame->_type = AVMEDIA_TYPE_AUDIO;
  frame->channels = ctx->channels;
  frame->sample_rate = ctx->sample_rate;
  LOGDEBUG(frame->toString());
  // Downstream consumers get their own copy; the caller owns the returned one.
  pushFrame(new Frame(*frame));

  return frame;
}
/**
 * Decodes one video packet into a newly allocated, caller-owned Frame.
 *
 * If the decoder did not complete a picture, a blank frame is returned and
 * the caller must check Frame::isFinished().  On completion the frame's
 * aspect ratios, timestamps and duration are filled in and a copy is pushed
 * via pushFrame().
 */
Frame * Decoder::decodeVideo2(Packet & packet) {
  LOGTRACEMETHOD("Decode Video");
  // NOTE(review): dead branch -- `false &&` disables the pixel-format
  // converter setup; remove or re-enable deliberately.
  if (false && !_pix_fmt_converter) {
    Format in;
    in.width = ctx->width;
    in.height = ctx->height;
    in.pixel_format = ctx->pix_fmt;
    _output_format = in;
    _output_format.pixel_format = STD_PIX_FMT;
    _pix_fmt_converter = new PixelFormatConverter(in, _output_format);
    _pix_fmt_converter->open();
  }
  //Ptr<Frame> tmp_frame = new Frame(ctx->pix_fmt, ctx->width, ctx->height, false);
  //Frame * frame = new Frame(ctx->pix_fmt, ctx->width, ctx->height);
  Frame * frame = new Frame();
  int _frameFinished = 0;
  int len = packet.packet->size;
  LOGDEBUG(packet.toString());

  //  while (len > 0) {
  //    logdebug("Decode Packet");
  int bytesDecoded = 0;
  //if (ctx->codec_id > -1) {
  // Decode directly into the frame's AVFrame; _frameFinished is set when a
  // complete picture was produced.
  bytesDecoded = avcodec_decode_video2(ctx, frame->getAVFrame(), &_frameFinished, packet.packet);
  //}
  if (_frameFinished) {

    //_pix_fmt_converter->process(*tmp_frame, *frame);
    // Diagnostic dump of the codec's last coded frame, if available.
    if (ctx->coded_frame) {
      LOGDEBUG("DeCodedFrameQuality:" << ctx->coded_frame->quality);
      LOGDEBUG("Interlaced:" << ctx->coded_frame->interlaced_frame);
      LOGDEBUG("topfieldfirst:" << ctx->coded_frame->top_field_first);
      //LOGDEBUG("PictureType:" << av_get_pict_type_char(ctx->coded_frame->pict_type));
    }
  }
  //@TODO: this is a hack, because the decoder changes the TimeBase after the first packet was decoded
  // NOTE(review): dead branch -- `false &&` disables the first-packet pts
  // initialization; remove or re-enable deliberately.
  if (false && _last_pts == AV_NOPTS_VALUE) {
#ifdef USE_TIME_BASE_Q
    _last_pts = av_rescale_q(packet.getPts(), packet.getTimeBase(), AV_TIME_BASE_Q);
#else
    _last_pts = av_rescale_q(packet.getPts(), packet.getTimeBase(), _frame_rate);
#endif
    LOGDEBUG("setting last pts to " << _last_pts << " ctxtb=" << _frame_rate.num << "/" << _frame_rate.den
    << " ptb=" << packet.getTimeBase().num << "/" << packet.getTimeBase().den);
  }

  LOGDEBUG("BytesDecoded:" << bytesDecoded);
  frame->setPixelAspectRatio(ctx->sample_aspect_ratio);
  LOGDEBUG("PAR " << ctx->sample_aspect_ratio.num << "/" << ctx->sample_aspect_ratio.den);
  LOGDEBUG("RES " << ctx->coded_width << "/" << ctx->coded_height);
  // Derive the display aspect ratio from frame size and sample aspect ratio.
  AVRational display_aspect_ratio;
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
  ctx->width * ctx->sample_aspect_ratio.num,
  ctx->height * ctx->sample_aspect_ratio.den,
  1024 * 1024);
  LOGDEBUG("DAR " << display_aspect_ratio.num << "/" << display_aspect_ratio.den);

  if (bytesDecoded < 0) {
    LOGERROR("Error while decoding frame");
  }
  /**
   * if frame is not finished, returns the blank frame
   * the calling process of decode must ensure to check if the returning frame isFinished by calling the Method isFinished()
   */
  if (!_frameFinished) {
    LOGDEBUG("frame not finished");
    return frame;
  }
  frame->setStorageAspectRatio(ctx->coded_width, ctx->coded_height);
  frame->setPixelAspectRatio(ctx->sample_aspect_ratio);
  frame->setDisplayAspectRatio(display_aspect_ratio);
  len -= bytesDecoded;


  //  }
  /* calculating the Presentation TimeStamp here*/
  frame->setPts(_last_pts);
  //frame->setDts(_last_pts);

#ifdef USE_TIME_BASE_Q
  frame->setTimeBase(AV_TIME_BASE_Q);
  // calculating the duration of the decoded packet
  int64_t dur = av_rescale_q(packet.packet->duration, packet.getTimeBase(), AV_TIME_BASE_Q);
#else
  //  frame->setTimeBase(ctx->time_base);
  // calculating the duration of the decoded packet
  //    int64_t dur = av_rescale_q(packet.packet->duration, packet.getTimeBase(), ctx->time_base);
  //  int64_t tmp_dur=((int64_t)AV_TIME_BASE * ctx->time_base.num * ctx->ticks_per_frame) / ctx->time_base.den;
  // Time base is the inverted frame rate, so one tick == one frame (dur = 1).
  AVRational ar;
  ar.num = _frame_rate.den;
  ar.den = _frame_rate.num; //* ctx->ticks_per_frame;
  LOGDEBUG("ticks:" << ctx->ticks_per_frame);
  //  int64_t dur = av_rescale_q(ar.num , ar, ctx->time_base);
  frame->setTimeBase(ar);
  int64_t dur = 1;
#endif

  frame->setFinished(_frameFinished!=0);
  //  frame->_pixFormat = ctx->pix_fmt;
  frame->stream_index = _stream_index;//packet.packet->stream_index;


  frame->duration = dur;
  // Advance the running presentation timestamp for the next call.
  _last_pts += dur;
  //  if (!_frameFinished) {
  //    return frame;
  //  }

  frame->pos = 0;
  frame->_type = AVMEDIA_TYPE_VIDEO;
  LOGDEBUG(frame->toString());
  // Downstream consumers get their own copy; the caller owns the returned one.
  pushFrame(new Frame(*frame));
  return frame;
}
Example #9
0
// Returns the genre string from the TCON (content type) frame, translating
// ID3v1 numeric genre references into their names where possible.
String ID3v2::Tag::genre() const
{
  // TODO: In the next major version (TagLib 2.0) a list of multiple genres
  // should be separated by " / " instead of " ".  For the moment to keep
  // the behavior the same as released versions it is being left with " ".

  // Only proceed if a TCON frame exists and is a text identification frame.
  if(!d->frameListMap["TCON"].isEmpty() &&
     dynamic_cast<TextIdentificationFrame *>(d->frameListMap["TCON"].front()))
  {
    Frame *frame = d->frameListMap["TCON"].front();

    // ID3v2.4 lists genres as the fields in its frames field list.  If the field
    // is simply a number it can be assumed that it is an ID3v1 genre number.
    // Here we assume that if an ID3v1 string is present that it should be
    // appended to the genre string.  Multiple fields will be appended as the
    // string is built.

    if(d->header.majorVersion() == 4) {
      TextIdentificationFrame *f = static_cast<TextIdentificationFrame *>(frame);
      StringList fields = f->fieldList();

      String genreString;
      bool hasNumber = false;

      for(StringList::ConstIterator it = fields.begin(); it != fields.end(); ++it) {
        // A field is "numeric" only if every character is an ASCII digit.
        bool isNumber = true;
        for(String::ConstIterator charIt = (*it).begin();
            isNumber && charIt != (*it).end();
            ++charIt)
        {
          isNumber = *charIt >= '0' && *charIt <= '9';
        }

        if(!genreString.isEmpty())
          genreString.append(' ');

        if(isNumber) {
          // Translate an ID3v1 genre number (0-255) into its name.
          int number = (*it).toInt();
          if(number >= 0 && number <= 255) {
            hasNumber = true;
            genreString.append(ID3v1::genre(number));
          }
        }
        else
          genreString.append(*it);
      }
      // Only use the assembled string if at least one numeric genre was
      // translated; otherwise fall through to the plain-text handling below.
      if(hasNumber)
        return genreString;
    }

    String s = frame->toString();

    // ID3v2.3 "content type" can contain a ID3v1 genre number in parenthesis at
    // the beginning of the field.  If this is all that the field contains, do a
    // translation from that number to the name and return that.  If there is a
    // string following the ID3v1 genre number, that is considered to be
    // authoritative and we return that instead.  Or finally, the field may
    // simply be free text, in which case we just return the value.

    int closing = s.find(")");
    if(s.substr(0, 1) == "(" && closing > 0) {
      if(closing == int(s.size() - 1))
        return ID3v1::genre(s.substr(1, s.size() - 2).toInt());
      else
        return s.substr(closing + 1);
    }
    return s;
  }
  return String::null;
}
Example #10
0
// Round-trip test for service-request frames: parse a hand-built extended CAN
// ID + payload into a Frame, verify every decoded field, then compile it back
// and check the raw CAN frame is reproduced exactly.
TEST(Frame, ServiceParseCompile)
{
    using uavcan::Frame;
    using uavcan::CanFrame;
    using uavcan::TransferID;
    using uavcan::TransferType;

    Frame frame;

    /*
     * Priority
     * Service Type ID
     * Request Not Response
     * Destination Node ID
     * Service Not Message
     * Source Node ID
     */
    const uint32_t can_id =
        (31 << 24) |    // Priority
        (200 << 16) |   // Service Type ID
        (1 << 15) |     // Request Not Response
        (0x42 << 8) |   // Destination Node ID
        (1 << 7) |      // Service Not Message
        (42 << 0);      // Source Node ID

    // Last byte 0x6a is the tail byte: SOT=0, EOT=1, toggle=1, TID=10.
    const std::string payload_string = "hello\x6a"; // SET = 011, TID = 10

    /*
     * Parse
     */
    // Invalid CAN frames: RTR frames and standard-ID (non-extended) frames
    // must be rejected.
    ASSERT_FALSE(frame.parse(CanFrame(can_id | CanFrame::FlagRTR, (const uint8_t*)"", 0)));
    ASSERT_FALSE(frame.parse(makeCanFrame(can_id, payload_string, STD)));

    // Valid
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Every field must match the bit layout encoded in can_id and the tail byte.
    EXPECT_EQ(TransferID(10), frame.getTransferID());
    EXPECT_FALSE(frame.isStartOfTransfer());
    EXPECT_TRUE(frame.isEndOfTransfer());
    EXPECT_TRUE(frame.getToggle());
    EXPECT_EQ(uavcan::NodeID(42), frame.getSrcNodeID());
    EXPECT_EQ(uavcan::NodeID(0x42), frame.getDstNodeID());
    EXPECT_EQ(uavcan::TransferTypeServiceRequest, frame.getTransferType());
    EXPECT_EQ(200, frame.getDataTypeID().get());
    EXPECT_EQ(31, frame.getPriority().get());

    // Payload excludes the tail byte, hence length - 1 == getPayloadLen().
    EXPECT_EQ(payload_string.length(), frame.getPayloadLen() + 1);
    EXPECT_TRUE(std::equal(frame.getPayloadPtr(), frame.getPayloadPtr() + frame.getPayloadLen(),
                           reinterpret_cast<const uint8_t*>(&payload_string[0])));

    std::cout << frame.toString() << std::endl;

    /*
     * Compile
     */
    CanFrame can_frame;
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Compiling the parsed frame must reproduce the original CAN frame exactly.
    ASSERT_TRUE(frame.compile(can_frame));
    ASSERT_EQ(can_frame, makeCanFrame(can_id, payload_string, EXT));

    EXPECT_EQ(payload_string.length(), can_frame.dlc);
    EXPECT_TRUE(std::equal(can_frame.data, can_frame.data + can_frame.dlc,
                           reinterpret_cast<const uint8_t*>(&payload_string[0])));

    /*
     * Comparison
     */
    ASSERT_FALSE(Frame() == frame);
    ASSERT_TRUE(Frame() != frame);
    frame = Frame();
    ASSERT_TRUE(Frame() == frame);
    ASSERT_FALSE(Frame() != frame);
}
Example #11
0
// Round-trip test for anonymous message frames (broadcast from an unset node
// ID): parses a hand-built frame, verifies the decoded fields, then compiles
// it back and checks that the discriminator field is derived from the payload
// CRC rather than copied from the input.
TEST(Frame, AnonymousParseCompile)
{
    using uavcan::Frame;
    using uavcan::CanFrame;
    using uavcan::TransferID;
    using uavcan::TransferType;

    Frame frame;

    /*
     * Priority
     * Discriminator
     * Message Type ID
     * Service Not Message
     * Source Node ID
     */
    const uint32_t can_id =
        (16383 << 10) | // Discriminator
        (1 << 8);       // Message Type ID

    // Last byte 0xd4 is the tail byte: SOT=1, EOT=1, toggle=0, TID=20.
    const std::string payload_string = "hello\xd4"; // SET = 110, TID = 20

    // CRC over the payload -- used below to check the compiled discriminator.
    uavcan::TransferCRC payload_crc;
    payload_crc.add(reinterpret_cast<const uint8_t*>(payload_string.c_str()), unsigned(payload_string.length()));

    /*
     * Parse
     */
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Anonymous frames have broadcast (unset) source and destination node IDs.
    EXPECT_EQ(TransferID(20), frame.getTransferID());
    EXPECT_TRUE(frame.isStartOfTransfer());
    EXPECT_TRUE(frame.isEndOfTransfer());
    EXPECT_FALSE(frame.getToggle());
    EXPECT_TRUE(frame.getSrcNodeID().isBroadcast());
    EXPECT_TRUE(frame.getDstNodeID().isBroadcast());
    EXPECT_EQ(uavcan::TransferTypeMessageBroadcast, frame.getTransferType());
    EXPECT_EQ(1, frame.getDataTypeID().get());
    EXPECT_EQ(0, frame.getPriority().get());

    // Payload excludes the tail byte.
    EXPECT_EQ(payload_string.length() - 1, frame.getPayloadLen());
    EXPECT_TRUE(std::equal(frame.getPayloadPtr(), frame.getPayloadPtr() + frame.getPayloadLen(),
                           reinterpret_cast<const uint8_t*>(&payload_string[0])));

    std::cout << frame.toString() << std::endl;

    /*
     * Compile
     */
    // Bits 10..23 of the extended CAN ID carry the discriminator.
    const uint32_t DiscriminatorMask = 0x00FFFC00;
    const uint32_t NoDiscriminatorMask = 0xFF0003FF;

    CanFrame can_frame;
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Everything except the discriminator must round-trip unchanged.
    ASSERT_TRUE(frame.compile(can_frame));
    ASSERT_EQ(can_id & NoDiscriminatorMask & uavcan::CanFrame::MaskExtID,
              can_frame.id & NoDiscriminatorMask & uavcan::CanFrame::MaskExtID);

    EXPECT_EQ(payload_string.length(), can_frame.dlc);
    EXPECT_TRUE(std::equal(can_frame.data, can_frame.data + can_frame.dlc,
                           reinterpret_cast<const uint8_t*>(&payload_string[0])));

    // The compiled discriminator must equal the low 14 bits of the payload CRC.
    EXPECT_EQ((can_frame.id & DiscriminatorMask & uavcan::CanFrame::MaskExtID) >> 10, payload_crc.get() & 16383);

    /*
     * Comparison
     */
    ASSERT_FALSE(Frame() == frame);
    ASSERT_TRUE(Frame() != frame);
    frame = Frame();
    ASSERT_TRUE(Frame() == frame);
    ASSERT_FALSE(Frame() != frame);
}
Example #12
0
// Round-trip test for broadcast message frames: parse a hand-built extended
// CAN ID + payload into a Frame, verify every decoded field, then compile it
// back and check the raw CAN frame is reproduced exactly.
TEST(Frame, MessageParseCompile)
{
    using uavcan::Frame;
    using uavcan::CanFrame;
    using uavcan::TransferID;
    using uavcan::TransferType;

    Frame frame;

    /*
     * Priority
     * Message Type ID
     * Service Not Message
     * Source Node ID
     */
    const uint32_t can_id =
        (16 << 24) |    // Priority
        (20000 << 8) |  // Message Type ID
        (0 << 7) |      // Service Not Message
        (42 << 0);      // Source Node ID

    // Last byte 0xD4 is the tail byte: SOT=1, EOT=1, toggle=0, TID=20.
    const std::string payload_string = "hello\xD4"; // SET = 110, TID = 20

    /*
     * Parse
     */
    // Invalid CAN frames: RTR frames and standard-ID (non-extended) frames
    // must be rejected.
    ASSERT_FALSE(frame.parse(CanFrame(can_id | CanFrame::FlagRTR, (const uint8_t*)"", 0)));
    ASSERT_FALSE(frame.parse(makeCanFrame(can_id, payload_string, STD)));

    // Valid
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Every field must match the bit layout encoded in can_id and the tail byte.
    EXPECT_EQ(TransferID(20), frame.getTransferID());
    EXPECT_TRUE(frame.isStartOfTransfer());
    EXPECT_TRUE(frame.isEndOfTransfer());
    EXPECT_FALSE(frame.getToggle());
    EXPECT_EQ(uavcan::NodeID(42), frame.getSrcNodeID());
    EXPECT_TRUE(frame.getDstNodeID().isBroadcast());
    EXPECT_EQ(uavcan::TransferTypeMessageBroadcast, frame.getTransferType());
    EXPECT_EQ(20000, frame.getDataTypeID().get());
    EXPECT_EQ(16, frame.getPriority().get());

    // Payload excludes the tail byte.
    EXPECT_EQ(payload_string.length() - 1, frame.getPayloadLen());
    EXPECT_TRUE(std::equal(frame.getPayloadPtr(), frame.getPayloadPtr() + frame.getPayloadLen(),
                           payload_string.begin()));

    std::cout << frame.toString() << std::endl;

    /*
     * Compile
     */
    CanFrame can_frame;
    ASSERT_TRUE(frame.parse(makeCanFrame(can_id, payload_string, EXT)));

    // Compiling the parsed frame must reproduce the original CAN frame exactly.
    ASSERT_TRUE(frame.compile(can_frame));
    ASSERT_EQ(can_frame, makeCanFrame(can_id, payload_string, EXT));

    EXPECT_EQ(payload_string.length(), can_frame.dlc);
    std::cout << can_frame.toString() << std::endl;
    /*
     * FUN FACT: comparison of uint8_t with char may fail on the character 0xD4 (depending on the locale),
     * because it will be considered a Unicode character. Hence, we do reinterpret_cast<>.
     */
    EXPECT_TRUE(std::equal(can_frame.data, can_frame.data + can_frame.dlc,
                           reinterpret_cast<const uint8_t*>(&payload_string[0])));

    /*
     * Comparison
     */
    ASSERT_FALSE(Frame() == frame);
    ASSERT_TRUE(Frame() != frame);
    frame = Frame();
    ASSERT_TRUE(Frame() == frame);
    ASSERT_FALSE(Frame() != frame);
}