Example no. 1
0
/*---------------------------------------------------------------------------*/
/**
 * Build an IEEE 802.15.4 data frame from the current packetbuf contents
 * and hand it to the radio, retrying a few times if the radio is busy.
 *
 * \return 1 if the radio accepted the frame, 0 if it was dropped.
 */
int
sicslowmac_dataRequest(void)
{
  frame_create_params_t params;
  frame_result_t result;
  uint8_t attempts_left;

  _delay_ms(SICSLOW_CORRECTION_DELAY);

  /* Remember the handle so the TX-done path can report it back. */
  msduHandle = packetbuf_attr(PACKETBUF_ATTR_PACKET_ID);

  /* Frame control field: a plain, unsecured 2003-version data frame.
   * The ACK-request bit mirrors the packetbuf "reliable" attribute. */
  params.fcf.frameType = DATAFRAME;
  params.fcf.securityEnabled = false;
  params.fcf.framePending = false;
  params.fcf.ackRequired = packetbuf_attr(PACKETBUF_ATTR_RELIABLE);
  params.fcf.panIdCompression = false;
  params.fcf.frameVersion = IEEE802154_2003;

  /* Data sequence number advances on every outgoing frame. */
  params.seq = macDSN++;

  /* Addressing.
   * \todo For phase 1 the addresses are all long. We'll need a mechanism
   * in the rime attributes to tell the mac to use long or short for
   * phase 2. */
  params.fcf.srcAddrMode = LONGADDRMODE;
  params.dest_pid = ieee15_4ManagerAddress.get_dst_panid();

  if(rimeaddr_cmp(packetbuf_addr(PACKETBUF_ADDR_RECEIVER), &rimeaddr_null)) {
    /* A null Rime receiver address means 802.15.4 broadcast, which
     * requires short destination addressing and the broadcast PAN id. */
    params.fcf.destAddrMode = SHORTADDRMODE;
    params.dest_pid = BROADCASTPANDID;
    params.dest_addr.addr16 = BROADCASTADDR;
  } else {
    /* Unicast: take the long destination address from the packetbuf and
     * flip it from sicslowpan byte order to sicslowmac byte order. */
    memcpy(&params.dest_addr, (uint8_t *)packetbuf_addr(PACKETBUF_ADDR_RECEIVER), LONG_ADDR_LEN);
    byte_reverse((uint8_t *)&params.dest_addr.addr64, LONG_ADDR_LEN);

    /* Phase 1 - end nodes only sends to pan coordinator node. */
    /* params.dest_addr.addr64 = ieee15_4ManagerAddress.get_coord_long_addr(); */
    params.fcf.destAddrMode = LONGADDRMODE;
  }

  /* Source addressing: long address only for phase 1. */
  params.src_pid = ieee15_4ManagerAddress.get_src_panid();
  params.src_addr.addr64 = ieee15_4ManagerAddress.get_long_addr();

  /* Attach the payload straight out of the packetbuf. */
  params.payload_len = packetbuf_datalen();
  params.payload = packetbuf_dataptr();

  /* Serialize the frame and log it if frame logging is enabled. */
  frame_tx_create(&params, &result);
  LOG_FRAME(&params, &result);

  /* Hand the frame to the radio; if it reports RADIO_WRONG_STATE (busy),
   * back off with growing blocking delays and try again, three times. */
  for(attempts_left = 3; attempts_left > 0; attempts_left--) {
    radio_status_t rv;

    PRINTF("sicslowmac: sending packet of length %d to radio, result:", result.length);

    rv = radio_send_data(result.length, result.frame);

    if(rv == RADIO_SUCCESS) {
      PRINTF(" Success\n");
      return 1; /* True says that the packet could be sent */
    }

    if(rv != RADIO_WRONG_STATE) {
      /* Any failure other than "busy" is final. */
      PRINTF(" Failed\n");
      return 0;
    }

    PRINTF(" Radio busy, retrying\n");

    /** \todo: Fix delay in sicslowmac so they do not block receiving */
    /* Blocking delays are the safest option here, but TX while RXing is
     * still a problem: the RX code cannot run during the wait. */
    if(attempts_left == 3) {
      _delay_ms(10);
    } else if(attempts_left == 2) {
      _delay_ms(50);
    } else {
      _delay_ms(200);
    }
  }

  PRINTF("sicslowmac: Unable to send packet, dropped\n");
  return 0;
}
/*
 * Grab one captured frame from the V4L2 device and convert it into packed
 * YUYV in frameBuffer (at most maxSize bytes).
 *
 * Flow: dequeue a filled mmap'ed buffer (VIDIOC_DQBUF), convert it to YUYV
 * according to the negotiated capture pixel format, then re-queue the buffer
 * (VIDIOC_QBUF) so the driver can reuse it. On DQBUF failure the function
 * returns without queueing; on insufficient output space the frame is
 * dropped but the buffer is still re-queued.
 */
void V4L2Camera::GrabRawFrame (void *frameBuffer, int maxSize)
{
    LOG_FRAME("V4L2Camera::GrabRawFrame: frameBuffer:%p, len:%d",frameBuffer,maxSize);
    int ret;

    /* Dequeue the next filled capture buffer; videoIn->buf.index tells us
       which mmap'ed buffer the driver filled. */
    memset(&videoIn->buf,0,sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_DQBUF Failed");
        return;
    }

    nDequeued++;

    // Calculate the stride of the output image (YUYV) in bytes
    // (YUYV packs 2 bytes per pixel, hence the shift).
    int strideOut = videoIn->outWidth << 1;

    // And the pointer to the start of the image
    // (capCropOffset skips any crop region at the top of the capture).
    uint8_t* src = (uint8_t*)videoIn->mem[videoIn->buf.index] + videoIn->capCropOffset;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Got Raw frame (%dx%d) (buf:%d@0x%p, len:%d)",videoIn->format.fmt.pix.width,videoIn->format.fmt.pix.height,videoIn->buf.index,src,videoIn->buf.bytesused);

    /* Avoid crashing! - Make sure there is enough room in the output buffer! */
    if (maxSize < videoIn->outFrameSize) {

        LOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);

    } else {

        /* Convert the captured frame to packed YUYV. Each case delegates
           to a format-specific converter; formats whose converters need
           the input stride pass format.fmt.pix.bytesperline, the planar
           and compressed ones take only the output dimensions. */
        switch (videoIn->format.fmt.pix.pixelformat)
        {
            case V4L2_PIX_FMT_JPEG:
            case V4L2_PIX_FMT_MJPEG:
                if(videoIn->buf.bytesused <= HEADERFRAME1) {
                    // Prevent crash on empty image
                    LOGE("Ignoring empty buffer ...\n");
                    break;
                }

                if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
                    LOGE("jpeg decode errors\n");
                    break;
                }
                break;

            case V4L2_PIX_FMT_UYVY:
                uyvy_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVYU:
                yvyu_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YYUV:
                yyuv_to_yuyv((uint8_t*)frameBuffer, strideOut,
                             src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUV420:
                yuv420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVU420:
                yvu420_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV12:
                nv12_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV21:
                nv21_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV16:
                nv16_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV61:
                nv61_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y41P:
                y41p_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_GREY:
                grey_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y16:
                y16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA501:
                s501_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA505:
                s505_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA508:
                s508_to_yuyv((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUYV:
                /* Already YUYV: plain row-by-row copy, honoring the
                   (possibly padded) input line stride. */
                {
                    int h;
                    uint8_t* pdst = (uint8_t*)frameBuffer;
                    uint8_t* psrc = src;
                    int ss = videoIn->outWidth << 1;
                    for (h = 0; h < videoIn->outHeight; h++) {
                        memcpy(pdst,psrc,ss);
                        pdst += strideOut;
                        psrc += videoIn->format.fmt.pix.bytesperline;
                    }
                }
                break;

            /* Bayer formats: demosaic to RGB24 in tmpBuffer first, then
               convert RGB24 to YUYV. The trailing 0..3 selects the Bayer
               color-filter arrangement. */
            case V4L2_PIX_FMT_SGBRG8: //0
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 0);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SGRBG8: //1
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 1);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SBGGR8: //2
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 2);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SRGGB8: //3
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 3);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                            (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_RGB24:
                rgb_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_BGR24:
                bgr_to_yuyv((uint8_t*) frameBuffer, strideOut,
                            src, videoIn->format.fmt.pix.bytesperline, videoIn->outWidth, videoIn->outHeight);
                break;

            default:
                LOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
                break;
        }

        LOG_FRAME("V4L2Camera::GrabRawFrame - Copied frame to destination 0x%p",frameBuffer);
    }

    /* And Queue the buffer again so the driver can refill it. */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_QBUF Failed");
        return;
    }

    nQueued++;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Queued buffer");

}