std::tuple<std::string, double> TwitchStreamChunk::process(std::string body){
  // Download chunk -> temp file -> ffmpeg -> mono WAV -> pocketsphinx.
  // Returns the recognized text (empty on failure) and the decoder score
  // mapped through logmath_exp.

  // Fix: find_last_of returns std::string::size_type; storing it in an int
  // truncates on large strings. Keep the original "no slash -> whole URI"
  // behavior explicitly via an npos check.
  const std::string::size_type pos = _uri.find_last_of('/');
  const std::string fileName =
      (pos == std::string::npos) ? _uri : _uri.substr(pos + 1);

  {
    // RAII scope closes (and flushes) the file before ffmpeg reads it.
    std::ofstream file(fileName, std::ios::binary);
    file << body;
  }

  std::string audioFile = boost::filesystem::unique_path().native();
  audioFile.append(".wav");

  // NOTE(review): fileName/audioFile are interpolated into a shell command.
  // If _uri can contain shell metacharacters this is a command-injection
  // vector -- consider fork/exec instead of system().
  std::stringstream cmd;
  cmd << "ffmpeg -i " << fileName << " -vn -ac 1 " << audioFile << " > /dev/null 2>&1";
  system(cmd.str().c_str());

  const char *result = nullptr;
  double tmp = 0.0;

  // Fix: fopen can fail (e.g. ffmpeg produced no output); the original
  // passed NULL to ps_decode_raw and fclose, which is undefined behavior.
  FILE *aFile = fopen(audioFile.c_str(), "r");
  if (aFile != nullptr) {
    ps_decode_raw(getDecoder(), aFile, -1);
    fclose(aFile);

    auto logarithm = ps_get_logmath(getDecoder());

    int confidence = 1; // out-param: filled by ps_get_hyp with the path score
    result = ps_get_hyp(getDecoder(), &confidence);
    tmp = logmath_exp(logarithm, confidence);
  }

  // Best-effort cleanup of both temporaries.
  std::remove(fileName.c_str());
  std::remove(audioFile.c_str());
  return std::make_tuple(result == nullptr ? "" : std::string(result), tmp);
}
Esempio n. 2
0
std::vector<Decoder*> Decoder::getDecoders(const char* arch, const char* decNames) {

    // Build one Decoder per comma-separated name in `decNames`; names that
    // fail to produce a decoder are silently dropped.
    assert(arch && decNames);

    std::vector<Decoder*> decoders;

    // Fix: the original used a variable-length array (char buf[len+1]),
    // which is a compiler extension and not standard C++. Tokenize with
    // std::string instead of memset/strncpy on a mutable copy.
    const std::string names(decNames);
    std::string::size_type start = 0;

    for (;;) {
        const std::string::size_type comma = names.find(',', start);
        // Substring up to the next comma, or to the end for the last token
        // (which has no trailing comma).
        const std::string token =
            (comma == std::string::npos) ? names.substr(start)
                                         : names.substr(start, comma - start);

        Decoder* cur = getDecoder(arch, token.c_str());
        if (cur != NULL) {
            decoders.push_back(cur);
        }

        if (comma == std::string::npos)
            break;
        start = comma + 1;
    }

    /* Return the filtered list of decoders */
    return decoders;
}
        std::vector<Ref<Result> > QRCodeMultiReader::decodeMultiple(Ref<BinaryBitmap> image,
                                                                    DecodeHints hints) {
            // Detect every QR candidate in the image and decode each one;
            // candidates that fail to decode are skipped. Throws when no
            // candidate decodes successfully.
            std::vector<Ref<Result> > results;
            MultiDetector detector(image->getBlackMatrix());

            std::vector<Ref<DetectorResult> > candidates = detector.detectMulti(hints);
            for (unsigned int idx = 0; idx < candidates.size(); idx++) {
                try {
                    Ref<DecoderResult> decoded = getDecoder().decode(candidates[idx]->getBits());
                    ArrayRef<Ref<ResultPoint> > corners = candidates[idx]->getPoints();
                    Ref<Result> one(new Result(decoded->getText(),
                                               decoded->getRawBytes(),
                                               corners, BarcodeFormat::QR_CODE));
                    // result->putMetadata(ResultMetadataType.BYTE_SEGMENTS, decoderResult->getByteSegments());
                    // result->putMetadata(ResultMetadataType.ERROR_CORRECTION_LEVEL, decoderResult->getECLevel().toString());
                    results.push_back(one);
                } catch (ReaderException const &re) {
                    (void) re;
                    // this candidate did not decode -- move on to the next
                }
            }
            if (results.empty()) {
                throw ReaderException("No code detected");
            }
            return results;
        }
               /**
                * Decode the bytes in `in` into the wide string `out` using
                * the current locale's charset.
                *
                * Fast path: 7-bit ASCII bytes are copied straight across
                * (skipped on EBCDIC builds where bytes are not ASCII).
                * Anything left is handed to a cached charset decoder that is
                * rebuilt whenever the locale encoding changes.
                *
                * NOTE(review): `mutex`, `encoding` and `decoder` appear to be
                * members guarded by `synchronized`; the final decoder->decode
                * call happens outside the lock -- confirm that is intended.
                */
               virtual log4cxx_status_t decode(ByteBuffer& in,
                  LogString& out) {
                  const char* p = in.current();
                  size_t i = in.position();
#if !LOG4CXX_CHARSET_EBCDIC                  
                  // Copy the leading ASCII run directly; stop at the first
                  // byte >= 0x80, which needs real charset conversion.
                  for (; i < in.limit() && ((unsigned int) *p) < 0x80; i++, p++) {
                      out.append(1, *p);
                  }
                  in.position(i);
#endif                  
                  if (i < in.limit()) {
                           // Non-ASCII remainder: look up the locale encoding
                           // via APR using a short-lived pool.
                           Pool subpool;
                           const char* enc = apr_os_locale_encoding(subpool.getAPRPool());
                           {
                                synchronized sync(mutex);
                                if (enc == 0) {
                                    // No locale encoding available: fall back
                                    // to plain US-ASCII (once).
                                    if (decoder == 0) {
                                        encoding = "C";
                                        decoder = new USASCIICharsetDecoder();
                                    }
                                } else if (encoding != enc) {
                                    // Locale changed since the last call:
                                    // rebuild the decoder for the new charset.
                                    encoding = enc;
                                    try {
                                       LogString e;
                                       Transcoder::decode(encoding, e);
                                       decoder = getDecoder(e);
                                    } catch (IllegalArgumentException& ex) {
                                       // Unknown charset name: degrade to
                                       // US-ASCII rather than fail.
                                       decoder = new USASCIICharsetDecoder();
                                    }
                                }
                            }
                            return decoder->decode(in, out);        
                  }
                  // Entire buffer was ASCII; nothing left to convert.
                  return APR_SUCCESS;  
               }
Esempio n. 5
0
void SmushPlayer::handleFrame() {
	// Demo movies take their on-screen position from the decoder.
	if (_demo) {
		_x = getDecoder()->getX();
		_y = getDecoder()->getY();
	}

	// Nothing more to do until the video has actually run out.
	if (!_videoDecoder->endOfVideo())
		return;

	if (_videoLooping) {
		getDecoder()->rewind(); // Note: a failed rewind is not handled.
	} else {
		// Not looping: mark the movie finished and hand control back
		// to the engine.
		_videoFinished = true;
		g_grim->setMode(GrimEngine::NormalMode);
		deinit();
	}
}
Esempio n. 6
0
void SmushPlayer::init() {
	SmushDecoder *decoder = getDecoder();
	if (_demo) {
		_x = decoder->getX();
		_y = decoder->getY();
	} else {
		decoder->setLooping(_videoLooping);
	}
	MoviePlayer::init();
}
Esempio n. 7
0
void DSDDemod::webapiFormatChannelReport(SWGSDRangel::SWGChannelReport& response)
{
    // Populate the DSD demodulator section of a web-API channel report.
    double magsqAvg, magsqPeak;
    int nbMagsqSamples;
    getMagSqLevels(magsqAvg, magsqPeak, nbMagsqSamples);

    // Fetch the report sub-object once instead of on every setter line.
    auto *report = response.getDsdDemodReport();

    report->setChannelPowerDb(CalcDb::dbPower(magsqAvg));
    report->setAudioSampleRate(m_audioSampleRate);
    report->setChannelSampleRate(m_inputSampleRate);
    report->setSquelch(m_squelchOpen ? 1 : 0);
    report->setPllLocked(getDecoder().getSymbolPLLLocked() ? 1 : 0);
    report->setSlot1On(getDecoder().getVoice1On() ? 1 : 0);
    report->setSlot2On(getDecoder().getVoice2On() ? 1 : 0);
    report->setSyncType(new QString(getDecoder().getFrameTypeText()));
    report->setInLevel(getDecoder().getInLevel());
    report->setCarierPosition(getDecoder().getCarrierPos());
    report->setZeroCrossingPosition(getDecoder().getZeroCrossingPos());
    report->setSyncRate(getDecoder().getSymbolSyncQuality());
    report->setStatusText(new QString(updateAndGetStatusText()));
}
Esempio n. 8
0
    void SoundManager::streamMusicFull(const std::string& filename)
    {
        if(!mOutput->isInitialized())
            return;
        std::cout <<"Playing "<<filename<< std::endl;
        try
        {
            stopMusic();

            DecoderPtr decoder = getDecoder();
            decoder->open(filename);

            mMusic = mOutput->streamSound(decoder, volumeFromType(Play_TypeMusic),
                                          1.0f, Play_NoEnv|Play_TypeMusic);
        }
        catch(std::exception &e)
        {
            std::cout << "Music Error: " << e.what() << "\n";
        }
    }
Esempio n. 9
0
/**
	Load or append a file.
	The file type is determined automatically and the appropriate
	ad-hoc video decoder is spawned.
*/
uint8_t ADM_Composer::addFile (char *name, uint8_t mode)
{
  uint8_t    ret =    0;
  aviInfo    info;
  WAVHeader *    _wavinfo;
//  aviHeader *    tmp;
  fileType    type =    Unknown_FileType;

UNUSED_ARG(mode);
	_haveMarkers=0; // by default no markers are present
  assert (_nb_segment < MAX_SEG);
  assert (_nb_video < MAX_VIDEO);

  // Sniff the container type from the file contents; bail out if the
  // file cannot be identified at all.
  if (!identify (name, &type))
    return 0;


// Helper: allocate the demuxer class `y` for file type `x` and open it.
#define OPEN_AS(x,y) case x:\
						_videos[_nb_video]._aviheader=new y; \
						 ret = _videos[_nb_video]._aviheader->open(name); \
						break;
  switch (type)
    {
      case VCodec_FileType:
      		loadVideoCodecConf(name);      		
		return ADM_IGN; // we do it but it wil fail, no problem with that
      		break;
      OPEN_AS (Mp4_FileType, mp4Header);
      OPEN_AS (H263_FileType, h263Header);
      // For AVI we first try top open it as openDML
      case AVI_FileType:
      			_videos[_nb_video]._aviheader=new OpenDMLHeader; 
			 ret = _videos[_nb_video]._aviheader->open(name); 			
			break;
      
      OPEN_AS (Nuppel_FileType, nuvHeader);
      OPEN_AS (BMP_FileType, picHeader);
      OPEN_AS (MpegIdx_FileType, mpeg2decHeader);
      OPEN_AS (_3GPP_FileType, _3GPHeader);
       OPEN_AS (Ogg_FileType, oggHeader);

    case Mpeg_FileType:
    	// look if the idx exists
	char tmpname[256];
	assert(strlen(name)+5<256);;
	strcpy(tmpname,name);
	strcat(tmpname,".idx");
	// Re-enter addFile() with the companion .idx file if it already exists.
	if(addFile(tmpname)) return 1;
      // then propose to index it
      if (GUI_Question ("This looks like mpeg\n Do you want to index it?"))
	{
	  char *	  idx, *	    mname;
	  int	    track;

	  DIA_indexerPrefill(name);
	  if (DIA_mpegIndexer (&mname, &idx, &track, 1))
	    {
	      if ((mname == NULL) || (idx == NULL))
		{
		  GUI_Alert ("Select files!");

		  return 0;
		}
	      printf ("\n indexing :%s to \n%s\n", mname, idx);


	      // Build the index, then re-open through the freshly made .idx.
	      if (indexMpeg (mname, idx, (uint8_t) track))
	      {
	      		printf("\n re-opening %s\n", idx);
			return addFile (idx, 0);
	      }
	      return 0;

	    }
	}
      return 0;
      break;
	case WorkBench_FileType:

  		return loadWorbench(name);
    default:
      if (type == Unknown_FileType)
	{
	  printf ("\n not identified ...\n");
	}
      else
	printf
	  ("\n successfully identified but no loader support detected...\n");
      return 0;
    }
  // check opening was successful
  if (ret == 0)
    {
      printf ("\n Attempt to open %s failed!\n", name);
      delete
	_videos[_nb_video].
	_aviheader;;
      return 0;
    }

  // else update info
  _videos[_nb_video]._aviheader->getVideoInfo (&info);
  _videos[_nb_video]._aviheader->setMyName (name);
//    fourCC::print( info.fcc );
  _total_frames += info.nb_frames;
  _videos[_nb_video]._nb_video_frames = info.nb_frames;


  // and update audio info
  //_________________________
  _wavinfo = _videos[_nb_video]._aviheader->getAudioInfo ();	//wavinfo); // will be null if no audio
  if (!_wavinfo)
    {
      printf ("\n *** NO AUDIO ***\n");
      _videos[_nb_video]._audiostream = NULL;
    }
  else
    {

      _videos[_nb_video]._aviheader->getAudioStream (&_videos[_nb_video].
						     _audiostream);
      _videos[_nb_video]._audio_size =
	_videos[_nb_video]._audiostream->getLength ();
	// For mpeg2, try to guess if it is pulldowned material
	double duration_a, duration_v;
	double rdirect, rpulldown;
	
	duration_a=_videos[_nb_video]._audio_size;
	duration_a/=_wavinfo->byterate;   // now we got duration in seconds
	
	// ditto for video
	duration_v= _videos[_nb_video]._nb_video_frames;
	duration_v/=info.fps1000;
	duration_v*=1000;
	
	printf("Audio : %f video : %f\n",duration_a,duration_v);
	// Pulldown heuristic: ~29.97 fps video whose audio runs ~25% longer
	// than the video was probably telecined from 23.976 fps material.
	if(MpegIdx_FileType==type && info.fps1000>29000 && info.fps1000<30000
		 && duration_a>1 && duration_v>1)
	{
		rdirect=(duration_a-duration_v)/duration_v;
		if(rdirect<0) rdirect=-rdirect;
		
		rpulldown=((duration_a*0.8)-duration_v)/duration_v;
		if(rpulldown<0) rpulldown=-rpulldown;
		
		
		printf("Direct : %f pd : %f\n",rdirect,rpulldown);
		if( rdirect*2> rpulldown)
		{
			printf("Probably pulldowned, switching to 23.976 \n");
			 AVIStreamHeader *ily =	_videos[_nb_video]._aviheader->	getVideoStreamHeader ();
      				ily->dwRate = 23976;
      				ily->dwScale = 1000;			
		
		}
		
	
	}
	

    }

  printf ("\n Decoder FCC: ");
  fourCC::print (info.fcc);
  // ugly hack
  if (info.fps1000 > 2000 * 1000)
    {
      printf (" FPS too high, switching to 25 fps hardcoded\n");
      info.fps1000 = 25 * 1000;
      updateVideoInfo (&info);
    }
  // Fetch codec extra data and spawn the matching video decoder.
  uint32_t
    l;
  uint8_t *
    d;
  _videos[_nb_video]._aviheader->getExtraHeaderData (&l, &d);
  _videos[_nb_video].decoder = getDecoder (info.fcc,
					   info.width, info.height, l, d);


  //
  //  And automatically create the segment
  //
  _segments[_nb_segment]._reference = _nb_video;
  _segments[_nb_segment]._audio_size = _videos[_nb_video]._audio_size;
  _segments[_nb_segment]._audio_start = 0;
  _segments[_nb_segment]._start_frame = 0;
  _segments[_nb_segment]._nb_frames   =   _videos[_nb_video]._nb_video_frames ;
  _videos[_nb_video]._isAudioVbr=0;
  // next one please
	_nb_video++;
	_nb_segment++;

//______________________________________
// 1-  check for B _ frame  existence
// 2- check  for consistency with reported flags
//______________________________________
	// `count` guards the TryAgain retry so VOP-unpacking is attempted once.
	uint8_t count=0;
TryAgain:	
	_VIDEOS 	*vid;
	uint32_t err=0;

		vid= &(_videos[_nb_video-1]);
		vid->_forwardReference=0xFFFFFFF;
		vid->_forwardFrame= NULL;
		vid->_reorderReady=0;
		// we only try if we got everything needed...
		if(!vid->decoder)
		{
			printf("\n no decoder to check for B- frame\n");
			return 1;
		}
		if(!vid->decoder->bFramePossible())
		{
			printf("\n no  B- frame with that codec \n");
			return 1;
		}
		printf("\n checking for B-Frames...\n");
		if( vid->_nb_video_frames >15) // 12
		{
				uint8_t 		*buffer,*bufferin;
				uint32_t 		len,flags,flag2;
				uint8_t 		bframe=0, bconsistency=1;

				// Scratch buffers sized for the decoded frame (x2 margin).
				buffer=new uint8_t [info.width* info.height*2];
				bufferin=new uint8_t [info.width* info.height*2];


				// we decode 5 frames..should be enough to get an opinion
				for(uint32_t i=0;i<13;i++)  //10
				{
					flags=flag2=0;
  					vid->_aviheader->getFrameNoAlloc (i,
							 bufferin,
							 &len, &flags);
					if(!vid->decoder->uncompress( (uint8_t *)bufferin,(uint8_t *)buffer,len,&flag2 ))
					{
						err++;
						printf("\n ***oops***\n");
					}

					if(i<5) continue; // ignore the first frames
					
					// check if it is a b-frame
					//printf(" %lu : %lu \n",i,flag2);
					if(flag2 & AVI_B_FRAME)
					{
						printf(" %lu is a b frame\n",i);
					 	bframe=1;
						// Cross-check: does the index flag this frame
						// as a B-frame too?
						vid->_aviheader->getFlags(i,&flags);
						if(!(flags & AVI_B_FRAME))
							bconsistency=0;
						else
							printf("\n and flags is ok\n");
					}
					
				}

				delete [] buffer;
				delete [] bufferin;
				if(bframe)
				{
					printf("\n Mmm this appear to have b-frame...\n");
					if(bconsistency)
					{
						printf("\n And the index is ok\n");
						vid->_forwardFrame=new uint8_t [720*576*3];
						vid->_reorderReady=vid->_aviheader->reorder();
						if(vid->_reorderReady)
						{
							printf("\n Frames re-ordered, B-frame friendly now :)\n");
							aprintf(" we had :%lu",info.nb_frames);
							// update nb frame in case we dropped some
							_total_frames -= info.nb_frames;
							_videos[_nb_video-1]._aviheader->getVideoInfo (&info);
							aprintf(" we have now  :%lu",info.nb_frames);
							_total_frames += info.nb_frames;
  							_videos[_nb_video-1]._nb_video_frames = info.nb_frames;
						}
						else
						{
							printf("\n Frames not  re-ordered, expect problem with b-frames\n");
						}

					}
					else
					{
						printf("\n But the  index is not up to date \n");
						uint32_t ispacked=0;
						// If it is Divx 5.0.xxx use divx decoder
						if(fourCC::check(info.fcc,(uint8_t *)"DX50")
						|| fourCC::check(info.fcc,(uint8_t *)"XVID" ))
						{


							//if(vid->decoder->isDivxPacked())
							if(vid->decoder->isDivxPacked())
							{
								// can only unpack avi
								if(!count && type==AVI_FileType)
								{
									if(GUI_Question("It looks like Vop packed divx.\nDo you want me to unpack it ?"))
									{
									OpenDMLHeader *dml=NULL;
									count++;	
									dml=(OpenDMLHeader *)vid->_aviheader;
									// Can we repack it ?
									if(dml->unpackPacked())	
										goto TryAgain;
									GUI_Alert("Could not unpack it\n, using backup decoder= not frame accurate.");
									}
								}
#if  1 //def USE_DIVX

								printf("\n Switching codec...\n");
								delete vid->decoder;
								vid->decoder=getDecoderVopPacked(info.fcc,
																   info.width,
																   info.height,0,NULL);
								ispacked=1;
#else
								GUI_Alert("Troubles ahead : This a vop packed avi..");
#endif

							}

						}
						// else warn user
						if(!ispacked)
							GUI_Alert("\n Please used Misc->Rebuild frame for BFrames!");
					}
				}
				else
				{
					printf("Seems it does not contain B-frames...\n");
				}
		printf(" End of B-frame check\n");
		}

  return 1;
}
Esempio n. 10
0
void SmushPlayer::restoreState(SaveGame *state) {
	// Restore the base player state first, then re-seek if a movie
	// was active when the game was saved.
	MoviePlayer::restoreState(state);
	if (!isPlaying())
		return;
	getDecoder()->seekToTime((uint32)_movieTime); // Currently not fully working (out of synch)
}
Esempio n. 11
0
void SmushPlayer::postHandleFrame() {
	// Only demo movies refresh their position after each frame.
	if (!_demo)
		return;
	_x = getDecoder()->getX();
	_y = getDecoder()->getY();
}
Esempio n. 12
0
SmushPlayer::SmushPlayer(bool demo) : MoviePlayer(), _demo(demo) {
	// The player owns its decoder; flag demo mode on it immediately.
	Grim::SmushDecoder *dec = new Grim::SmushDecoder();
	_videoDecoder = dec;
	dec->setDemo(_demo);
}
Esempio n. 13
0
/*
 * Class:     org_apache_harmony_awt_gl_image_GifDecoder
 * Method:    decode
 * Signature: ([BIJLorg/apache/harmony/awt/gl/image/GifDecoder$GifDataStream;Lorg/apache/harmony/awt/gl/image/GifDecoder$GifGraphicBlock;)I
 */
JNIEXPORT jint JNICALL Java_org_apache_harmony_awt_gl_image_GifDecoder_decode
(JNIEnv *env,
 jobject obj,
 jbyteArray jInput,
 jint bytesInBuffer,
 jlong hDecoder,
 jobject dataStream,
 jobject currBlock) {

    /*
     * Incremental GIF decode step: feed `bytesInBuffer` new bytes from
     * `jInput` into the native decoder state machine, run it until it
     * stalls (needs more data) or finishes, and return the number of
     * scanlines decoded in this call. The native decoder handle travels
     * between calls via the hNativeDecoderID field on `obj`.
     */
    GIF_RETVAL retval = STATUS_OK;
    GifDecoder *decoder = getDecoder(env, obj, dataStream, (GifDecoder*) ((IDATA)hDecoder));
    int scanlinesDecoded;

    /* Pin the Java byte array; no JNI calls are allowed while it is held
       except through the decoder helpers below. */
    decoder->input = decoder->inputPtr =
                         (*env)->GetPrimitiveArrayCritical(env, jInput, 0);
    decoder->bytesInBuffer += bytesInBuffer;
    bytesInBuffer = decoder->bytesInBuffer;

    /* Drive the state machine until we hit a terminal status or run out
       of buffered input. Each handler consumes bytes and advances state. */
    while(retval == STATUS_OK && decoder->bytesInBuffer > 0) {
        switch(decoder->state) {
        case STATE_INIT: {
            retval = readHeader(env, decoder);
            break;
        }

        case STATE_AT_GLOBAL_COLOR_TABLE: {
            retval = loadColorTable(env, decoder->jGlobalColorTable, decoder);
            break;
        }

        case STATE_AT_LOCAL_COLOR_TABLE: {
            retval = loadColorTable(env, NULL, decoder);
            break;
        }

        case STATE_BLOCK_BEGINNING: {
            /* Dispatch on the one-byte block introducer. */
            unsigned char blockLabel = *(decoder->inputPtr);
            switch(blockLabel) {
            case EXTENSION_INTRODUCER:
                retval = readExtension(env, decoder, currBlock);
                break;
            case IMAGE_SEPARATOR:
                retval = readImageDescriptor(env, currBlock, decoder);
                break;
            case GIF_TRAILER:
                retval = STATUS_EOF;
                break;
            }
            break;
        }

        case STATE_STARTING_DECOMPRESSION: {
            retval = initDecompression(env, decoder, currBlock);
            break;
        }

        case STATE_DECOMPRESSING: {
            if(!decoder->interlace)
                retval = decompress(env, currBlock, decoder);
            else
                retval = decompressInterlaced(env, currBlock, decoder);
            break;
        }

        case STATE_READING_COMMENT: {
            retval = readComment(env, decoder);
            break;
        }

        case STATE_SKIPPING_BLOCKS: {
            retval = skipData(decoder);
            break;
        }

        default:
            // Should never execute this!
            break;
        }
    }

    // Copy unconsumed data to the start of the input buffer
    if(decoder->bytesInBuffer > 0) {
        memmove(decoder->input, decoder->inputPtr, decoder->bytesInBuffer);
    }

    (*env)->ReleasePrimitiveArrayCritical(env, jInput, decoder->input, 0);

    /* Tell the Java side how many of the freshly supplied bytes were eaten. */
    (*env)->SetIntField(
        env,
        obj,
        img_GIF_bytesConsumedID,
        bytesInBuffer - decoder->bytesInBuffer
    );

    /* Work out how many complete scanlines this call produced, which
       differs between progressive and interlaced images. */
    if(decoder->stateVars.imageDataStarted) {
        if(!decoder->interlace) {
            scanlinesDecoded = decoder->pixelsDecoded / decoder->currentWidth -
                               decoder->oldPixelsDecoded / decoder->currentWidth;
            decoder->oldPixelsDecoded = decoder->pixelsDecoded;
        } else {
            /* Interlaced: at most one visible line per completed pass line. */
            if(retval == STATUS_LINE_COMPLETED && decoder->pass < MAX_PASS) {
                scanlinesDecoded = 1;
                if(decoder->currScanline >= 0)
                    (*env)->SetIntField(env, currBlock, img_GIF_gb_currYID, decoder->currScanline);

                decoder->scanlineOffset = 0;
            } else {
                scanlinesDecoded = 0;
            }
        }
    } else {
        scanlinesDecoded = 0;
    }

    if(retval == STATUS_FRAME_COMPLETED) {
        decoder->oldPixelsDecoded = decoder->pixelsDecoded = 0;
    }

    // Free the decoder if decoding is finished
    if(retval == STATUS_EOF) {
        free(decoder);
        decoder = NULL;
    }

    /* Persist (or clear) the native handle for the next call. */
    (*env)->SetLongField(env, obj, img_GIF_hNativeDecoderID, (jlong) ((IDATA)decoder));

    return scanlinesDecoded;
}
/**
    Load the configured background image (_param->backgroundImg), decode it
    with the codec matching its fourCC, convert/resize it to the filter's
    output dimensions, and store the result in _BkgGnd.
    Always returns 1; on any failure _BkgGnd is left NULL and all
    temporaries are released via the shared skipIt cleanup block.
*/
uint8_t	ADMVideoAnimated::loadImage(void)
{
picHeader *pic=NULL;
decoders *decoder=NULL;
ADMImage  *fullSize=NULL;
ADMImageResizer *resizer=NULL;
uint32_t len=0,flags=0;
uint32_t w,h;
uint8_t *extraData=NULL;
uint32_t extraDataSize=0;

uint8_t *rdBuffer=NULL;

    // Drop any previously loaded background before (re)loading.
    if(_BkgGnd) delete _BkgGnd;
    _BkgGnd=NULL;

    // open the jpg file and load it to binary
    
    pic=new picHeader;
    if(!pic->open((char *)_param->backgroundImg))
    {
        printf("[Animated] Cannot load background image\n");
        goto skipIt;
    }
  
    // Ok, now we need its size
    
    w=pic->getWidth();
    h=pic->getHeight();
    printf("[Animated]Pic: %d x %d\n",w,h);
    pic->getExtraHeaderData(&extraDataSize,&extraData);
    //********
    {
        // Pick a decoder based on the image's fourCC and extra header data.
        aviInfo info;
        pic->getVideoInfo(&info);
        decoder=getDecoder (info.fcc, w,h,extraDataSize,extraData);
    }
     if(!decoder) 
    {
        printf("[Animated]Cannot get decoder\n");
        goto skipIt;
    }
    // Build new image
    fullSize=new ADMImage(w,h);
    fullSize->blacken();
    rdBuffer=new uint8_t[w*h*3];     // Hardcoded!
    if(!pic->getFrameNoAlloc(0,rdBuffer,&len))
    {
        printf("[Animated]Get frame failed\n");
        goto skipIt;
    }
    // Decode it
    if(!decoder->uncompress (rdBuffer, fullSize, len,&flags))
    {
        printf("[Animated]Decoding failed\n");
        goto skipIt;
    }
    if(fullSize->_colorspace!=ADM_COLOR_YV12)
    {
        printf("[Animated]Wrong colorspace, only yv12 supported\n");
        goto skipIt;
    }
    // Need to packit ?
    if(fullSize->_planeStride[0])
        fullSize->pack(0);
    // Resize it
    _BkgGnd=new ADMImage(_info.width,_info.height);
    resizer=new ADMImageResizer(w,h,_info.width,_info.height);
    //Util_saveJpg ("/tmp/before.jpg",w,h,fullSize);
    if(!resizer->resize(fullSize,_BkgGnd))
    {
        delete _BkgGnd;
        _BkgGnd=NULL;
        printf("[Animated]Resize failed\n");
        
    }else
    {
        printf("[Animated]Image ready\n");
    }
    
// Common exit: release every temporary regardless of how we got here.
skipIt:
    {
        if(decoder) delete decoder;
        decoder=NULL;
        if(pic)     delete pic;
        pic=NULL;
        if(fullSize)     delete fullSize;
        fullSize=NULL;
        if(resizer)     delete resizer;
        resizer=NULL;
        if(rdBuffer)     delete [] rdBuffer;
        rdBuffer=NULL;
    }
    return 1;


}
Esempio n. 15
0
float PDCHCommon::FER() const {
	// Delegate straight to the underlying decoder's frame-erasure rate.
	return getDecoder()->FER();
}
Esempio n. 16
0
void PDCHCommon::countBadFrame() {
	// Forward the bad-frame event to the underlying decoder's counter.
	getDecoder()->countBadFrame();
}
Esempio n. 17
0
    void RPIImageDecoder::decode(bool onlySize)
    {
        // Decode the buffered image data via the Broadcom hardware decoder,
        // swapping RGBA->BGRA into the first frame buffer. When `onlySize`
        // is set, only the image dimensions are published.
        unsigned int width, height;

        if (failed())
            return;

        // make sure we have all the data before doing anything
        if (!isAllDataReceived())
            return;

        if (onlySize)
        {
            // Fix: the original had a stray ';' after this condition, so
            // setSize() always ran -- even when readSize() failed and left
            // width/height uninitialized.
            if (readSize(width, height))
            {
                setSize(width, height);
            }
            return;
        }
        else
        {
            readSize(width, height);

            clock_t start = clock();

            // Fix: check for an empty cache *before* taking a reference to
            // element 0; indexing an empty cache is undefined behavior.
            if (m_frameBufferCache.isEmpty())
            {
                log("decode : frameBuffercache is empty");
                setFailed();
                return;
            }

            ImageFrame& buffer = m_frameBufferCache[0];

            if (buffer.status() == ImageFrame::FrameEmpty)
            {
                if (!buffer.setSize(width, height))
                {
                    log("decode : could not define buffer size");
                    setFailed();
                    return;
                }

                // The buffer is transparent outside the decoded area while the image is
                // loading. The completed image will be marked fully opaque in jpegComplete().
                buffer.setHasAlpha(false);
            }

            // lock the mutex so that we only process once at a time
            pthread_mutex_lock(&decode_mutex);

            // setup decoder request information
            BRCMIMAGE_REQUEST_T* dec_request = getDecoderRequest();
            BRCMIMAGE_T *decoder = getDecoder();

            memset(dec_request, 0, sizeof(BRCMIMAGE_REQUEST_T));
            dec_request->input = (unsigned char*)m_data->data();
            dec_request->input_size = m_data->size();
            dec_request->output = (unsigned char*)buffer.getAddr(0, 0);
            dec_request->output_alloc_size = width * height * 4;
            dec_request->output_handle = 0;
            dec_request->pixel_format = PIXEL_FORMAT_RGBA;
            dec_request->buffer_width = 0;
            dec_request->buffer_height = 0;

            brcmimage_acquire(decoder);
            BRCMIMAGE_STATUS_T status = brcmimage_process(decoder, dec_request);

            if (status == BRCMIMAGE_SUCCESS)
            {
                clock_t copy = clock();

                unsigned char *ptr = (unsigned char *)buffer.getAddr(0, 0);
                for (unsigned int i = 0; i < dec_request->height * dec_request->width; i++)
                {
                    // we swap RGBA -> BGRA
                    unsigned char tmp = *ptr;
                    *ptr = ptr[2];
                    ptr[2] = tmp;
                    ptr += 4;
                }

                brcmimage_release(decoder);

                buffer.setPixelsChanged(true);
                buffer.setStatus(ImageFrame::FrameComplete);
                buffer.setHasAlpha(m_hasAlpha);

                clock_t end = clock();
                unsigned long millis = (end - start) * 1000 / CLOCKS_PER_SEC;
                unsigned long copymillis = (end - copy) * 1000 / CLOCKS_PER_SEC;

                log("decode : image (%d x %d)(Alpha=%d) decoded in %d ms (copy in %d ms), source size = %d bytes", width, height, m_hasAlpha, millis, copymillis, m_data->size());

            }
            else
            {
                // Fix: balance brcmimage_acquire() on the failure path too;
                // the original leaked one decoder reference per failed decode.
                brcmimage_release(decoder);
                log("decode : Decoding failed with status %d", status);
            }

            pthread_mutex_unlock(&decode_mutex);
        }


    }
Esempio n. 18
0
static void applyConfiguration(DECLARE_ENGINE_PARAMETER_F) {
	// Re-seed the custom MAP curve from the sensor configuration and
	// select the decoder matching the configured sensor type.
	air_pressure_sensor_config_s *sensorCfg = &engineConfiguration->map.sensor;
	customMap.init(0, sensorCfg->valueAt0, 5, sensorCfg->valueAt5);
	mapDecoder = getDecoder(sensorCfg->type);
}
Esempio n. 19
0
// Renders a single fixed-width status line (82 chars + NUL) for the currently
// synchronized digital voice protocol into m_formatStatusText, and records the
// detected protocol in m_signalFormat.
//
// The text is column-oriented: each protocol branch first lays down (or
// reuses) a template string, then patches individual fields in place with
// memcpy/snprintf at hard-coded column offsets. The memcpy patches
// intentionally omit NUL terminators — the surrounding template characters
// keep the string well formed.
//
// NOTE(review): index 82 is written unconditionally by the trailing guard, so
// m_formatStatusText must be at least 83 bytes — confirm against the member
// declaration.
void DSDDemod::formatStatusText()
{
    switch (getDecoder().getSyncType())
    {
    // ----- DMR: data/voice sync, mobile station or positive polarity -----
    case DSDcc::DSDDecoder::DSDSyncDMRDataMS:
    case DSDcc::DSDDecoder::DSDSyncDMRDataP:
    case DSDcc::DSDDecoder::DSDSyncDMRVoiceMS:
    case DSDcc::DSDDecoder::DSDSyncDMRVoiceP:
        // Lay down the column template only on entry into DMR mode, so fields
        // filled in by previous DMR frames are not wiped on every call.
        if (m_signalFormat != signalFormatDMR)
        {
            strcpy(m_formatStatusText, "Sta: __ S1: __________________________ S2: __________________________");
        }

        // Station type at columns 5..7: base station / mobile / unknown.
        switch (getDecoder().getStationType())
        {
        case DSDcc::DSDDecoder::DSDBaseStation:
            memcpy(&m_formatStatusText[5], "BS ", 3);
            break;
        case DSDcc::DSDDecoder::DSDMobileStation:
            memcpy(&m_formatStatusText[5], "MS ", 3);
            break;
        default:
            memcpy(&m_formatStatusText[5], "NA ", 3);
            break;
        }

        // 26-character status text per TDMA slot (S1 at col 12, S2 at 43).
        // Assumes getSlot0Text()/getSlot1Text() point at >= 26 readable
        // bytes — TODO confirm in DSDcc.
        memcpy(&m_formatStatusText[12], getDecoder().getDMRDecoder().getSlot0Text(), 26);
        memcpy(&m_formatStatusText[43], getDecoder().getDMRDecoder().getSlot1Text(), 26);
        m_signalFormat = signalFormatDMR;
        break;
    // ----- D-Star: header or frame sync, either polarity -----
    case DSDcc::DSDDecoder::DSDSyncDStarHeaderN:
    case DSDcc::DSDDecoder::DSDSyncDStarHeaderP:
    case DSDcc::DSDDecoder::DSDSyncDStarN:
    case DSDcc::DSDDecoder::DSDSyncDStarP:
        if (m_signalFormat != signalFormatDStar)
        {
                                     //           1    1    2    2    3    3    4    4    5    5    6    6    7    7    8
                                     // 0....5....0....5....0....5....0....5....0....5....0....5....0....5....0....5....0..
            strcpy(m_formatStatusText, "________/____>________|________>________|____________________|______:___/_____._");
                                     // MY            UR       RPT1     RPT2     Info                 Loc    Target
        }

        {
            const std::string& rpt1 = getDecoder().getDStarDecoder().getRpt1();
            const std::string& rpt2 = getDecoder().getDStarDecoder().getRpt2();
            const std::string& mySign = getDecoder().getDStarDecoder().getMySign();
            const std::string& yrSign = getDecoder().getDStarDecoder().getYourSign();

            // Call signs are fixed-width fields; each is only patched in when
            // present, otherwise the template underscores remain visible.
            if (rpt1.length() > 0) { // 0 or 8
                memcpy(&m_formatStatusText[23], rpt1.c_str(), 8);
            }
            if (rpt2.length() > 0) { // 0 or 8
                memcpy(&m_formatStatusText[32], rpt2.c_str(), 8);
            }
            if (yrSign.length() > 0) { // 0 or 8
                memcpy(&m_formatStatusText[14], yrSign.c_str(), 8);
            }
            if (mySign.length() > 0) { // 0 or 13
                // NOTE(review): copies 13 bytes regardless of the string's
                // actual length — relies on getMySign() having >= 13 bytes of
                // backing storage when non-empty; confirm in DSDcc.
                memcpy(&m_formatStatusText[0], mySign.c_str(), 13);
            }
            // Fixed-width slow-data text (20 chars) and Maidenhead locator (6).
            memcpy(&m_formatStatusText[41], getDecoder().getDStarDecoder().getInfoText(), 20);
            memcpy(&m_formatStatusText[62], getDecoder().getDStarDecoder().getLocator(), 6);
            // Bearing (degrees) / distance in the "Target" columns 69..81.
            snprintf(&m_formatStatusText[69], 82-69, "%03d/%07.1f",
                    getDecoder().getDStarDecoder().getBearing(),
                    getDecoder().getDStarDecoder().getDistance());
        }

        m_formatStatusText[82] = '\0';
        m_signalFormat = signalFormatDStar;
        break;
    // ----- dPMR: frame type, colour code, own/called IDs -----
    case DSDcc::DSDDecoder::DSDSyncDPMR:
        snprintf(m_formatStatusText, 82, "%s CC: %04d OI: %08d CI: %08d",
                DSDcc::DSDdPMR::dpmrFrameTypes[(int) getDecoder().getDPMRDecoder().getFrameType()],
                getDecoder().getDPMRDecoder().getColorCode(),
                getDecoder().getDPMRDecoder().getOwnId(),
                getDecoder().getDPMRDecoder().getCalledId());
        m_signalFormat = signalFormatDPMR;
        break;
    // ----- NXDN: layout depends on which RF channel type is decoded -----
    case DSDcc::DSDDecoder::DSDSyncNXDNP:
    case DSDcc::DSDDecoder::DSDSyncNXDNN:
        if (getDecoder().getNXDNDecoder().getRFChannel() == DSDcc::DSDNXDN::NXDNRCCH)
        {
            //           1    1    2    2    3    3    4    4    5    5    6    6    7    7    8
            // 0....5....0....5....0....5....0....5....0....5....0....5....0....5....0....5....0..
            // RC r cc mm llllll ssss
            // Control channel: rate, RAN, message type, location id, services.
            snprintf(m_formatStatusText, 82, "RC %s %02d %02X %06X %02X",
                    getDecoder().getNXDNDecoder().isFullRate() ? "F" : "H",
                    getDecoder().getNXDNDecoder().getRAN(),
                    getDecoder().getNXDNDecoder().getMessageType(),
                    getDecoder().getNXDNDecoder().getLocationId(),
                    getDecoder().getNXDNDecoder().getServicesFlag());
        }
        else if ((getDecoder().getNXDNDecoder().getRFChannel() == DSDcc::DSDNXDN::NXDNRTCH)
            || (getDecoder().getNXDNDecoder().getRFChannel() == DSDcc::DSDNXDN::NXDNRDCH))
        {
            // Traffic / data channel: either idle, or an active call summary.
            if (getDecoder().getNXDNDecoder().isIdle()) {
                snprintf(m_formatStatusText, 82, "%s IDLE", getDecoder().getNXDNDecoder().getRFChannelStr());
            }
            else
            {
                //           1    1    2    2    3    3    4    4    5    5    6    6    7    7    8
                // 0....5....0....5....0....5....0....5....0....5....0....5....0....5....0....5....0..
                // Rx r cc mm sssss>gddddd
                // Active call: source id > (G)roup or (I)ndividual destination.
                snprintf(m_formatStatusText, 82, "%s %s %02d %02X %05d>%c%05d",
                        getDecoder().getNXDNDecoder().getRFChannelStr(),
                        getDecoder().getNXDNDecoder().isFullRate() ? "F" : "H",
                        getDecoder().getNXDNDecoder().getRAN(),
                        getDecoder().getNXDNDecoder().getMessageType(),
                        getDecoder().getNXDNDecoder().getSourceId(),
                        getDecoder().getNXDNDecoder().isGroupCall() ? 'G' : 'I',
                        getDecoder().getNXDNDecoder().getDestinationId());
            }
        }
        else
        {
            //           1    1    2    2    3    3    4    4    5    5    6    6    7    7    8
            // 0....5....0....5....0....5....0....5....0....5....0....5....0....5....0....5....0..
            // RU
            // Unknown NXDN channel type.
            snprintf(m_formatStatusText, 82, "RU");
        }
        m_signalFormat = signalFormatNXDN;
        break;
    // ----- YSF (System Fusion): built left-to-right in four snprintf passes -----
    case DSDcc::DSDDecoder::DSDSyncYSF:
        //           1    1    2    2    3    3    4    4    5    5    6    6    7    7    8
        // 0....5....0....5....0....5....0....5....0....5....0....5....0....5....0....5....0..
        // C V2 RI 0:7 WL000|ssssssssss>dddddddddd |UUUUUUUUUU>DDDDDDDDDD|44444
        // Column 0: channel type if the FICH decoded cleanly, else the error code.
        if (getDecoder().getYSFDecoder().getFICHError() == DSDcc::DSDYSF::FICHNoError)
        {
            snprintf(m_formatStatusText, 82, "%s ", DSDcc::DSDYSF::ysfChannelTypeText[(int) getDecoder().getYSFDecoder().getFICH().getFrameInformation()]);
        }
        else
        {
            snprintf(m_formatStatusText, 82, "%d ", (int) getDecoder().getYSFDecoder().getFICHError());
        }

        // Columns 2..13: data type, call mode, block:frame totals, N/W and I/L flags.
        snprintf(&m_formatStatusText[2], 80, "%s %s %d:%d %c%c",
                DSDcc::DSDYSF::ysfDataTypeText[(int) getDecoder().getYSFDecoder().getFICH().getDataType()],
                DSDcc::DSDYSF::ysfCallModeText[(int) getDecoder().getYSFDecoder().getFICH().getCallMode()],
                getDecoder().getYSFDecoder().getFICH().getBlockTotal(),
                getDecoder().getYSFDecoder().getFICH().getFrameTotal(),
                (getDecoder().getYSFDecoder().getFICH().isNarrowMode() ? 'N' : 'W'),
                (getDecoder().getYSFDecoder().getFICH().isInternetPath() ? 'I' : 'L'));

        // Columns 14..16: squelch code or "---" placeholder.
        if (getDecoder().getYSFDecoder().getFICH().isSquelchCodeEnabled())
        {
            snprintf(&m_formatStatusText[14], 82-14, "%03d", getDecoder().getYSFDecoder().getFICH().getSquelchCode());
        }
        else
        {
            // strncpy NUL-pads the remainder of the buffer; harmless because
            // the snprintf at [17] below rebuilds the whole tail.
            strncpy(&m_formatStatusText[14], "---", 82-14);
        }

        // Scratch buffer for the destination field (radio-id pair or call sign).
        char dest[13];

        if (getDecoder().getYSFDecoder().radioIdMode())
        {
            // Size argument includes the NUL, so at most 11 visible chars.
            snprintf(dest, 12, "%-5s:%-5s",
                    getDecoder().getYSFDecoder().getDestId(),
                    getDecoder().getYSFDecoder().getSrcId());
        }
        else
        {
            // Size argument includes the NUL, so at most 10 visible chars.
            snprintf(dest, 11, "%-10s", getDecoder().getYSFDecoder().getDest());
        }

        // Columns 17..81: |src>dest|uplink>downlink|rem4 — this pass writes
        // the terminating NUL for the whole line.
        snprintf(&m_formatStatusText[17], 82-17, "|%-10s>%s|%-10s>%-10s|%-5s",
                getDecoder().getYSFDecoder().getSrc(),
                dest,
                getDecoder().getYSFDecoder().getUplink(),
                getDecoder().getYSFDecoder().getDownlink(),
                getDecoder().getYSFDecoder().getRem4());

        m_signalFormat = signalFormatYSF;
        break;
    // ----- No recognized sync: clear the status line -----
    default:
        m_signalFormat = signalFormatNone;
        m_formatStatusText[0] = '\0';
        break;
    }

    m_formatStatusText[82] = '\0'; // guard
}