static int ReadConnectRejectMessage( URLContext *h, NetworkMessage *message )
{
    SrvConnectRejectMsg *bodyPtr = NULL;

    /* Allocate the message body */
    if( (message->body = av_malloc( sizeof(SrvConnectRejectMsg) )) == NULL )
        return AVERROR(ENOMEM);

    /* Now read from the stream into the message */
    bodyPtr = (SrvConnectRejectMsg *)message->body;

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->reason, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->timestamp, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)bodyPtr->macAddr, MAC_ADDR_LENGTH ) != MAC_ADDR_LENGTH )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->appVersion, sizeof(unsigned long) ) != sizeof(unsigned long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->minViewerVersion, sizeof(unsigned long) ) != sizeof(unsigned long) )
        return AVERROR(EIO);

    /* Correct the byte ordering */
    bodyPtr->reason           = av_be2ne32(bodyPtr->reason);
    bodyPtr->timestamp        = av_be2ne32(bodyPtr->timestamp);
    bodyPtr->appVersion       = av_be2ne32(bodyPtr->appVersion);
    bodyPtr->minViewerVersion = av_be2ne32(bodyPtr->minViewerVersion);

    return 0;
}
static void NToHMessageHeader( MessageHeader *header )
{
    if( header ) {
        header->magicNumber    = av_be2ne32(header->magicNumber);
        header->length         = av_be2ne32(header->length);
        header->channelID      = av_be2ne32(header->channelID);
        header->sequence       = av_be2ne32(header->sequence);
        header->messageVersion = av_be2ne32(header->messageVersion);
        header->checksum       = av_be2ne32(header->checksum);
        header->messageType    = av_be2ne32(header->messageType);
    }
}
static int dspicProbe( AVProbeData *p )
{
    uint32_t magicNumber = 0;

    /* The magic number is a 32-bit big-endian field, so read exactly four
       bytes rather than sizeof(long), which is 8 on LP64 systems */
    if( p->buf_size <= sizeof(magicNumber) )
        return 0;

    /* Get what should be the magic number field of the first header */
    memcpy( &magicNumber, p->buf, sizeof(magicNumber) );

    /* Adjust the byte ordering */
    magicNumber = av_be2ne32(magicNumber);

    if( magicNumber == DSPacketHeaderMagicNumber )
        return AVPROBE_SCORE_MAX;

    return 0;
}
static void HToNMessageHeader( MessageHeader *header, unsigned char *buf )
{
    MessageHeader tempHeader;
    int           bufIdx = 0;

    if( header != NULL && buf != NULL ) {
        /* Set whatever header values we can in here */
        tempHeader.magicNumber = av_be2ne32(header->magicNumber);
        memcpy( &buf[bufIdx], &tempHeader.magicNumber, sizeof(unsigned long) );
        bufIdx += sizeof(unsigned long);

        tempHeader.length = av_be2ne32(header->length);
        memcpy( &buf[bufIdx], &tempHeader.length, sizeof(unsigned long) );
        bufIdx += sizeof(unsigned long);

        tempHeader.channelID = av_be2ne32(header->channelID);
        memcpy( &buf[bufIdx], &tempHeader.channelID, sizeof(long) );
        bufIdx += sizeof(long);

        tempHeader.sequence = av_be2ne32(header->sequence); /* Currently unsupported at server */
        memcpy( &buf[bufIdx], &tempHeader.sequence, sizeof(long) );
        bufIdx += sizeof(long);

        tempHeader.messageVersion = av_be2ne32(header->messageVersion);
        memcpy( &buf[bufIdx], &tempHeader.messageVersion, sizeof(unsigned long) );
        bufIdx += sizeof(unsigned long);

        tempHeader.checksum = av_be2ne32(header->checksum); /* As suggested in protocol documentation */
        memcpy( &buf[bufIdx], &tempHeader.checksum, sizeof(long) );
        bufIdx += sizeof(long);

        tempHeader.messageType = av_be2ne32(header->messageType);
        memcpy( &buf[bufIdx], &tempHeader.messageType, sizeof(long) );
        bufIdx += sizeof(long);
    }
}
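/*
 * Hedged usage sketch, not taken from the original source: it only shows how
 * the two helpers above pair up.  HToNMessageHeader() flattens a header into
 * a big-endian byte buffer for transmission, while NToHMessageHeader() fixes
 * the byte order of a header that has already been read back into the struct.
 * WIRE_HEADER_SIZE is a hypothetical name assuming the seven fields are
 * serialized as 32-bit values (7 * 4 = 28 bytes), which is what the
 * av_be2ne32() calls imply.
 */
#define WIRE_HEADER_SIZE 28 /* hypothetical constant, see comment above */

static void SerializeHeaderSketch( MessageHeader *header, unsigned char wire[WIRE_HEADER_SIZE] )
{
    /* The caller would then send all WIRE_HEADER_SIZE bytes to the server */
    HToNMessageHeader( header, wire );
}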
static int decode_frame(AVCodecContext *avctx, void *data,
                        int *got_frame, AVPacket *avpkt)
{
    const uint32_t *src = (const uint32_t *)avpkt->data;
    AVFrame *pic = data;
    int width = avctx->width;
    int y = 0;
    uint16_t *ydst, *udst, *vdst, *yend;
    int ret;

    if (avpkt->size < avctx->width * avctx->height * 8 / 3) {
        av_log(avctx, AV_LOG_ERROR, "Packet too small\n");
        return AVERROR_INVALIDDATA;
    }
    if (avpkt->size > avctx->width * avctx->height * 8 / 3) {
        avpriv_request_sample(avctx, "(Probably) padded data");
    }

    if ((ret = ff_get_buffer(avctx, pic, 0)) < 0)
        return ret;

    ydst = (uint16_t *)pic->data[0];
    udst = (uint16_t *)pic->data[1];
    vdst = (uint16_t *)pic->data[2];
    yend = ydst + width;
    pic->pict_type = AV_PICTURE_TYPE_I;
    pic->key_frame = 1;

    for (;;) {
        uint32_t v = av_be2ne32(*src++);
        *udst++ = (v >> 16) & 0xFFC0;
        *ydst++ = (v >> 6 ) & 0xFFC0;
        *vdst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *ydst++ = (v >> 16) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }

        *udst++ = (v >> 6 ) & 0xFFC0;
        *ydst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *vdst++ = (v >> 16) & 0xFFC0;
        *ydst++ = (v >> 6 ) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }

        *udst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *ydst++ = (v >> 16) & 0xFFC0;
        *vdst++ = (v >> 6 ) & 0xFFC0;
        *ydst++ = (v << 4 ) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }
    }

    *got_frame = 1;

    return avpkt->size;
}
static int decode_frame(AVCodecContext *avctx, void *data,
                        int *data_size, AVPacket *avpkt)
{
    int y = 0;
    int width = avctx->width;
    AVFrame *pic = avctx->coded_frame;
    const uint32_t *src = (const uint32_t *)avpkt->data;
    uint16_t *ydst, *udst, *vdst, *yend;

    if (pic->data[0])
        avctx->release_buffer(avctx, pic);

    if (avpkt->size < avctx->width * avctx->height * 8 / 3) {
        av_log(avctx, AV_LOG_ERROR, "Packet too small\n");
        return -1;
    }
    if (avpkt->size > avctx->width * avctx->height * 8 / 3) {
        av_log_ask_for_sample(avctx, "Probably padded data\n");
    }

    pic->reference = 0;
    if (avctx->get_buffer(avctx, pic) < 0)
        return -1;

    ydst = (uint16_t *)pic->data[0];
    udst = (uint16_t *)pic->data[1];
    vdst = (uint16_t *)pic->data[2];
    yend = ydst + width;
    pic->pict_type = AV_PICTURE_TYPE_I;
    pic->key_frame = 1;

    for (;;) {
        uint32_t v = av_be2ne32(*src++);
        *udst++ = (v >> 16) & 0xFFC0;
        *ydst++ = (v >> 6 ) & 0xFFC0;
        *vdst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *ydst++ = (v >> 16) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }

        *udst++ = (v >> 6 ) & 0xFFC0;
        *ydst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *vdst++ = (v >> 16) & 0xFFC0;
        *ydst++ = (v >> 6 ) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }

        *udst++ = (v << 4 ) & 0xFFC0;

        v = av_be2ne32(*src++);
        *ydst++ = (v >> 16) & 0xFFC0;
        *vdst++ = (v >> 6 ) & 0xFFC0;
        *ydst++ = (v << 4 ) & 0xFFC0;

        if (ydst >= yend) {
            ydst += pic->linesize[0] / 2 - width;
            udst += pic->linesize[1] / 2 - width / 2;
            vdst += pic->linesize[2] / 2 - width / 2;
            yend  = ydst + width;
            if (++y >= avctx->height)
                break;
        }
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame *)data = *avctx->coded_frame;

    return avpkt->size;
}
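/*
 * Illustrative sketch, not part of either decode_frame() above: the loops
 * unpack big-endian 32-bit words that carry three 10-bit samples in bits
 * 31..22, 21..12 and 11..2 (bits 1..0 are padding), each left-aligned into a
 * 16-bit output, which is why every store masks with 0xFFC0.  The helpers
 * below pack three 10-bit values the same way and verify that the decoder's
 * shifts recover them; the names are hypothetical and only demonstrate the
 * word layout.
 */
#include <assert.h>
#include <stdint.h>

static uint32_t pack_10bit_triplet(uint16_t a, uint16_t b, uint16_t c)
{
    return ((uint32_t)(a & 0x3FF) << 22) |
           ((uint32_t)(b & 0x3FF) << 12) |
           ((uint32_t)(c & 0x3FF) <<  2);
}

static void check_10bit_layout(void)
{
    uint32_t v = pack_10bit_triplet(0x155, 0x2AA, 0x0F0);

    /* Same extraction as decode_frame(): 10 bits left-aligned in 16 */
    assert(((v >> 16) & 0xFFC0) == (0x155 << 6));
    assert(((v >> 6 ) & 0xFFC0) == (0x2AA << 6));
    assert(((v << 4 ) & 0xFFC0) == (0x0F0 << 6));
}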
static int ExtractDSFrameData( uint8_t *buffer, struct DMImageData *frameData )
{
    int retVal = AVERROR(EIO);
    int bufIdx = 0;

    if( buffer != NULL ) {
        memcpy( frameData->identifier, &buffer[bufIdx], ID_LENGTH );
        bufIdx += ID_LENGTH;

        memcpy( &frameData->jpegLength, &buffer[bufIdx], sizeof(unsigned long) );
        bufIdx += sizeof(unsigned long);
        frameData->jpegLength = av_be2ne32(frameData->jpegLength);

        memcpy( &frameData->imgSeq, &buffer[bufIdx], sizeof(int64_t) );
        bufIdx += sizeof(int64_t);
        frameData->imgSeq = av_be2ne64(frameData->imgSeq);

        memcpy( &frameData->imgTime, &buffer[bufIdx], sizeof(int64_t) );
        bufIdx += sizeof(int64_t);

        memcpy( &frameData->camera, &buffer[bufIdx], sizeof(unsigned char) );
        bufIdx += sizeof(unsigned char);

        memcpy( &frameData->status, &buffer[bufIdx], sizeof(unsigned char) );
        bufIdx += sizeof(unsigned char);

        memcpy( &frameData->activity, &buffer[bufIdx], sizeof(unsigned short) * NUM_ACTIVITIES );
        bufIdx += sizeof(unsigned short) * NUM_ACTIVITIES;

        memcpy( &frameData->QFactor, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->QFactor = av_be2ne16(frameData->QFactor);

        memcpy( &frameData->height, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->height = av_be2ne16(frameData->height);

        memcpy( &frameData->width, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->width = av_be2ne16(frameData->width);

        memcpy( &frameData->resolution, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->resolution = av_be2ne16(frameData->resolution);

        memcpy( &frameData->interlace, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->interlace = av_be2ne16(frameData->interlace);

        memcpy( &frameData->subHeaderMask, &buffer[bufIdx], sizeof(unsigned short) );
        bufIdx += sizeof(unsigned short);
        frameData->subHeaderMask = av_be2ne16(frameData->subHeaderMask);

        memcpy( frameData->camTitle, &buffer[bufIdx], sizeof(char) * CAM_TITLE_LENGTH );
        bufIdx += sizeof(char) * CAM_TITLE_LENGTH;

        memcpy( frameData->alarmText, &buffer[bufIdx], sizeof(char) * ALARM_TEXT_LENGTH );
        bufIdx += sizeof(char) * ALARM_TEXT_LENGTH;

        retVal = 0;
    }

    return retVal;
}
static int ReadConnectReplyMessage( URLContext *h, NetworkMessage *message )
{
    SrvConnectReplyMsg *bodyPtr = NULL;

    /* Allocate memory in which to store the message body */
    if( (message->body = av_malloc( sizeof(SrvConnectReplyMsg) )) == NULL )
        return AVERROR(ENOMEM);

    bodyPtr = (SrvConnectReplyMsg *)message->body;

    /* Now read the message body, a field at a time */
    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->numCameras, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->viewableCamMask, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->telemetryCamMask, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->failedCamMask, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->maxMsgInterval, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->timestamp, sizeof(int64_t) ) != sizeof(int64_t) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)bodyPtr->cameraTitles, (16 * 28) ) != (16 * 28) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->unitType, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->applicationVersion, sizeof(unsigned long) ) != sizeof(unsigned long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->videoStandard, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)bodyPtr->macAddr, MAC_ADDR_LENGTH ) != MAC_ADDR_LENGTH )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)bodyPtr->unitName, UNIT_NAME_LENGTH ) != UNIT_NAME_LENGTH )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->numFixedRealms, sizeof(long) ) != sizeof(long) )
        return AVERROR(EIO);

    /* numFixedRealms must be converted immediately as it determines how many
       realm flags follow on the wire.  It must not be converted a second time
       in the block below, or it would be swapped back to big-endian order */
    bodyPtr->numFixedRealms = av_be2ne32(bodyPtr->numFixedRealms);

    if( DSReadBuffer( h, (uint8_t *)bodyPtr->realmFlags, (sizeof(unsigned long) * bodyPtr->numFixedRealms) ) != (sizeof(unsigned long) * bodyPtr->numFixedRealms) )
        return AVERROR(EIO);

    if( DSReadBuffer( h, (uint8_t *)&bodyPtr->minimumViewerVersion, sizeof(unsigned long) ) != sizeof(unsigned long) )
        return AVERROR(EIO);

    /* Correct the byte ordering of the remaining fields */
    bodyPtr->numCameras           = av_be2ne32(bodyPtr->numCameras);
    bodyPtr->viewableCamMask      = av_be2ne32(bodyPtr->viewableCamMask);
    bodyPtr->telemetryCamMask     = av_be2ne32(bodyPtr->telemetryCamMask);
    bodyPtr->failedCamMask        = av_be2ne32(bodyPtr->failedCamMask);
    bodyPtr->maxMsgInterval       = av_be2ne32(bodyPtr->maxMsgInterval);
    bodyPtr->unitType             = av_be2ne32(bodyPtr->unitType);
    bodyPtr->applicationVersion   = av_be2ne32(bodyPtr->applicationVersion);
    bodyPtr->videoStandard        = av_be2ne32(bodyPtr->videoStandard);
    bodyPtr->minimumViewerVersion = av_be2ne32(bodyPtr->minimumViewerVersion);

    return 0;
}