// Creates the FMOD output stream sized to hold buffersize_ms milliseconds of
// decoded PCM. Any stream we already own is closed first. Returns true when
// FSOUND_Stream_Create succeeded (StreamPointer is non-NULL).
bool FMODStreamOut::DoCreate(int buffersize_ms)
{
    if (StreamPointer)
        Close();

    // Buffer bytes = channels * bytes-per-sample * ms * samples-per-second / 1000.
    int buffersize = (Decoder()->GetInfo()->channels
                      * (Decoder()->GetInfo()->bits_per_sample / 8)
                      * buffersize_ms
                      * Decoder()->GetInfo()->frequency) / 1000;

    StreamPointer = FSOUND_Stream_Create(StreamCallback,
                                         buffersize,
                                         FMOD_STREAM_FORMAT_FLAGS,
                                         (int)Decoder()->GetInfo()->frequency,
                                         (void *)this);  // user data: this instance
    return StreamPointer != NULL;
}
/*
 * Runs one assembler pass over the source file `filename`.
 *
 * Each line is tokenized into (symboldef, opcode_mnemonic, operand) and, when
 * not flagged to be skipped, run through Decoder and Translator which fill in
 * `opcode`. Behaviour depends on the file-scope globals: `whichPass` selects
 * pass-1 (literal deployment) vs pass-2 (modification records + SYMTAB dump),
 * and `fp`, `isDone`, `letThisLinePass`, `statusBit`, `whereIsOnOPTAB`,
 * `whereIsOnASMTAB`, `PC`, `SYMTAB` carry state between the helpers.
 * Returns early (silently, after printing) if the file cannot be opened.
 */
void SinglePass(char* filename,char* symboldef, char* opcode_mnemonic, char* operand, unsigned int* opcode){
    fp=fopen(filename,"r");
    if(fp == NULL){
        printf("해당 파일을 읽을 수 없습니다.\n");  /* "The file could not be read." */
        return;
    }
    /* NOTE(review): feof()-controlled loops can run one extra iteration on a
       short read; presumably TokenParser tolerates that -- confirm. */
    while(!feof(fp) && !isDone){
        TokenParser(symboldef,opcode_mnemonic,operand);
        if(letThisLinePass == 0){
            Decoder(symboldef,opcode_mnemonic,operand,opcode);
            Translator(symboldef,opcode_mnemonic,operand,opcode);
        }
        /* Initialize control signals before next line */
        statusBit = 0;
        whereIsOnOPTAB = -1;
        whereIsOnASMTAB = -1;
    }
    if(whichPass == 1){
        DeployLiteral(&PC);
    }
    if(whichPass == 2){
        PrintModificationRecord(); /* Print modification record */
        ht_print(SYMTAB,"SYMTAB");
    }
    fclose(fp);
    /* Reset per-file state for the next pass.
       NOTE(review): the table cursors are reset to 0 here but to -1 inside
       the loop -- confirm which sentinel downstream code expects. */
    letThisLinePass=0;
    isDone = 0;
    whereIsOnOPTAB =0;
    whereIsOnASMTAB =0;
}
/*
 * Cipher driver: reads a cipher text, then either brute-forces every key
 * (Decoder) or decrypts with a user-supplied key (Key) and prints the result.
 * Returns 0 in all cases (original behaviour preserved).
 */
int main()
{
    char array[50];
    int key;
    char b_or_k[1];
    char *p;

    printf("Cipher Text : ");
    /* BUGFIX: gets() cannot bound the read and overflows array[]; it was
       removed from the language in C11. fgets() is the bounded replacement. */
    if (fgets(array, sizeof(array), stdin) == NULL)
    {
        printf("Error ! ");
        system("PAUSE");
        return 0;
    }
    /* Strip the trailing newline that fgets() keeps (gets() discarded it). */
    for (p = array; *p != '\0'; ++p)
    {
        if (*p == '\n')
        {
            *p = '\0';
            break;
        }
    }

    printf("Brute Force[b] or Key[k] ? :");
    /* The leading space makes scanf skip any whitespace pending in stdin. */
    scanf(" %c", &b_or_k[0]);
    if (b_or_k[0] == 'b')
    {
        Decoder(array);          /* brute force: try every key */
    }
    else if (b_or_k[0] == 'k')
    {
        printf("Key : ");
        scanf("%d", &key);
        Key(array, key);         /* decrypt in place with the given key */
        printf("Plain Text : %s \n\n", array);
    }
    else
    {
        printf("Error ! ");
    }
    system("PAUSE");
    return 0;
}
int CeltCodec::Decode(const char *data, int dataLength, SoundBuffer &soundFrame) { soundFrame.data.resize(MUMBLE_AUDIO_SAMPLES_IN_FRAME * MUMBLE_AUDIO_SAMPLE_WIDTH / 8); soundFrame.frequency = MUMBLE_AUDIO_SAMPLE_RATE; soundFrame.is16Bit = true; soundFrame.stereo = false; return celt_decode(Decoder(), (const unsigned char*)data, dataLength, (celt_int16*)&soundFrame.data[0], MUMBLE_AUDIO_SAMPLES_IN_FRAME); }
bool IComplexDatum::DeserializeAEONScript (CDatum::ESerializationFormats iFormat, const CString &sTypename, CCharStream *pStream)

//	DeserializeAEONScript
//
//	Deserialize AEONScript. Two on-disk encodings are supported: a struct
//	(leading '{', requires FLAG_SERIALIZE_AS_STRUCT) whose fields are applied
//	via SetElement, or a base64 payload handed to OnDeserialize.
//	Returns false on any parse/format failure.

	{
	int i;
	DWORD dwFlags = OnGetSerializeFlags();

	//	If we have an open brace then we've stored everything as a structure.

	if (pStream->GetChar() == '{')
		{
		//	Object must support this

		if (!(dwFlags & FLAG_SERIALIZE_AS_STRUCT))
			return false;

		//	Parse the structure

		CAEONScriptParser Parser(pStream);
		CDatum dData;
		CAEONScriptParser::ETokens iToken = Parser.ParseToken(&dData);
		if (iToken != CAEONScriptParser::tkDatum)
			return false;

		//	Take all the fields in the structure and apply them to our object
		//	(our descendants will do the right thing).

		for (i = 0; i < dData.GetCount(); i++)
			SetElement(dData.GetKey(i), dData.GetElement(i));
		}

	//	Otherwise we expect base64 encoded data

	else
		{
		//	Backup one character because we want the OnDeserialize call to read it.

		pStream->UnreadChar();

		//	Deserialize

		CBase64Decoder Decoder(pStream->GetByteStream());
		if (!OnDeserialize(iFormat, sTypename, Decoder))
			return false;

		//	Read the next character into the stream (the decoder consumed the
		//	base64 run; resync the char stream and advance past it).

		pStream->RefreshStream();
		pStream->ReadChar();
		}

	return true;
	}
// Turbo-decodes receivedSignal into output.
// Splits the received vector into its two constituent streams, runs the
// iterative Decoder for iteration_ rounds at noise level n0, then takes the
// hard decision from the second RSC decoder and de-interleaves it.
void TurboCode::doDecode(const itpp::cvec &receivedSignal, itpp::bvec *output, double n0) const
{
    // The received vector must hold a whole number of code words.
    assert(receivedSignal.size() % codeRate_.denominator() == 0);

    itpp::cvec stream1;
    itpp::cvec stream2;
    SeparateReceivedSignal(receivedSignal, &stream1, &stream2);

    Decoder(stream1, stream2, n0, iteration_);

    *output = Deinterleave(rsc2_.HardDecision(), interleaver_);
}
// Builds the CD-key info blob sent to battle.net: key length, product,
// public value, four zero bytes, then the key hash computed over the
// client/server tokens. Returns an empty BYTEARRAY if the key is invalid.
BYTEARRAY CBNCSUtilInterface :: CreateKeyInfo( string key, uint32_t clientToken, uint32_t serverToken )
{
	unsigned char Zeros[] = { 0, 0, 0, 0 };
	BYTEARRAY KeyInfo;
	CDKeyDecoder Decoder( key.c_str( ), key.size( ) );

	if( Decoder.isKeyValid( ) )
	{
		UTIL_AppendByteArray( KeyInfo, UTIL_CreateByteArray( (uint32_t)key.size( ), false ) );
		UTIL_AppendByteArray( KeyInfo, UTIL_CreateByteArray( Decoder.getProduct( ), false ) );
		UTIL_AppendByteArray( KeyInfo, UTIL_CreateByteArray( Decoder.getVal1( ), false ) );
		UTIL_AppendByteArray( KeyInfo, UTIL_CreateByteArray( Zeros, 4 ) );

		// BUGFIX: use an RAII buffer instead of raw new[]/delete[] -- the
		// heap block leaked if UTIL_AppendByteArray/UTIL_CreateByteArray threw.
		size_t Length = Decoder.calculateHash( clientToken, serverToken );
		string buf( Length, '\0' );
		Length = Decoder.getHash( &buf[0] );
		UTIL_AppendByteArray( KeyInfo, UTIL_CreateByteArray( (unsigned char *)&buf[0], Length ) );
	}

	return KeyInfo;
}
bool IComplexDatum::DeserializeJSON (const CString &sTypename, const TArray<CDatum> &Data) // DeserializeJSON // // Deserialize from JSON { if (Data.GetCount() == 0 || Data[0].GetBasicType() != CDatum::typeString) return false; // LATER: Handle serialization/deserialization of struct-based objects // Default deserialization CStringBuffer Buffer(Data[0]); CBase64Decoder Decoder(&Buffer); if (!OnDeserialize(CDatum::formatJSON, sTypename, Decoder)) return false; return true; }
/* basically just allocate memory for buffers and tables, and then call
   Steve B.'s decoder */
int BitmapIO_GIF::GIFDecoder(WORD linewidth)
{
    PixelBuf8 bufBuf(linewidth+1), stackBuf(MAX_CODES+1), suffixBuf(MAX_CODES+1);
    PixelBuf16 prefixBuf(MAX_CODES+1);
    BYTE *buf = bufBuf.Ptr();
    BYTE *stack = stackBuf.Ptr();
    BYTE *suffix = suffixBuf.Ptr();
    UWORD *prefix = prefixBuf.Ptr();

    // BUGFIX: check the allocations BEFORE writing into the buffers. The
    // original zero-filled via operator[] first and only then tested the
    // pointers, so an allocation failure dereferenced null before the
    // OUT_OF_MEMORY path could run.
    if (!(buf && stack && suffix && prefix))
        return OUT_OF_MEMORY;

    // Zero the code tables and the scan-line buffer.
    for (int i = 0; i <= MAX_CODES; ++i) {
        stackBuf[i] = 0;
        suffixBuf[i] = 0;
        prefixBuf[i] = 0;
    }
    for (int i = 0; i <= linewidth; ++i)
        bufBuf[i] = 0;

    return Decoder(linewidth, buf, stack, suffix, prefix);
}
void main(void) { FILE *file_speech; /* File of speech data */ FILE *file_synth; /* File of synth data */ FILE *file_encod; /* File of config */ FILE *file_amrResult; FILE *file_amr; FILE *file_dec; char amr_frame[32] = {0}; char amr_filehead[6] = {0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A}; Word16 new_speech[L_FRAME]; /* Pointer to new speech data */ Word16 rserial[SERIAL_FRAMESIZE]; /* Output bitstream buffer */ Word16 oserial[SERIAL_FRAMESIZE]; /* Output bitstream buffer */ Word16 new_synth[L_FRAME]; /* Output bitstream buffer */ Word16 org_synth[L_FRAME]; /* Output bitstream buffer */ Word32 frame; int i; if ((file_speech = fopen ("e:\\ccs_amrl\\amrtest\\testinp4\\T04.INP","rb")) == NULL) { printf("speech file not found\n"); exit(0); } if ((file_encod = fopen ("e:\\ccs_amrl\\amrtest\\testcod4\\122\\T04_122.COD","rb")) == NULL) { printf("encod file not found\n"); exit(0); } if ((file_amr = fopen ("f:\\AMR.AMR","wb")) == NULL) { printf("AMR file not found\n"); exit(0); } if ((file_dec = fopen ("e:\\dec.inp","wb")) == NULL) { printf("AMR file not found\n"); exit(0); } if(fwrite(amr_filehead, sizeof(char), 6, file_amr) != 6) { printf("fwrite() return error 0\n"); exit(0); } if ((file_synth = fopen ("e:\\ccs_amrl\\amrtest\\testcod4\\122\\T04_122.OUT","rb")) == NULL) { printf("synth file not found\n"); exit(0); } //if ((file_amrResult = fopen("E:\\AMR_RESULT.OUT","rb+")) == NULL) //{ // printf("out file not found\n"); // exit(0); //} frame = 0; while (fread (new_speech, 2, L_FRAME, file_speech) == L_FRAME) { frame++; /* get channel memory pointer */ if(1==frame) { Encoder(new_speech,rserial,1); } else { Encoder(new_speech,rserial,0); } /* amr成帧*/ //Glin_Encode_Amr_Frame(amr_frame, rserial); /*if(fwrite(amr_frame, sizeof(char), 32, file_amr) != 32) { printf("fwrite() return error 1\n"); exit(0); }*/ /*与标准序列比较*/ fread (oserial, 2, SERIAL_FRAMESIZE, file_encod); intel2moto(oserial,SERIAL_FRAMESIZE); for (i = 0; i < SERIAL_FRAMESIZE; i++) { if(oserial[i] != rserial[i]) { 
printf("encoder failed at %d\n",frame); break; } } printf("encoder passed at frame %d\n",frame); if(1==frame) { Decoder(oserial,new_synth,1); } else { Decoder(oserial,new_synth,0); } fread(org_synth, sizeof(Word16), L_FRAME,file_synth); //intel2moto(org_synth,L_FRAME); intel2moto(new_synth,L_FRAME); for (i = 0; i < L_FRAME; i++) { if(org_synth[i] != new_synth[i]) { printf("decoder failed at %d\n",frame); break; } } printf("decoder passed at frame %d\n",frame); fwrite(new_synth, sizeof(Word16), 160, file_dec); } printf("all passed\n"); fclose(file_speech); fclose(file_encod); fclose(file_synth); //return (0); }
// Imports an MP3 byte buffer [Buffer, BufferEnd) as a USoundWave asset by
// decoding it to raw 16-bit PCM. Returns nullptr (after broadcasting
// OnAssetPostImport with nullptr) on any validation failure.
UObject* UMP3SoundFactory::FactoryCreateBinary(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, const TCHAR* Type, const uint8*& Buffer, const uint8* BufferEnd, FFeedbackContext* Warn)
{
	FEditorDelegates::OnAssetPreImport.Broadcast(this, Class, InParent, Name, Type);

	// mpg123 is bound dynamically; a null function pointer means the DLL
	// was not loaded.
	if (mpg123_init == nullptr)
	{
		Warn->Logf(ELogVerbosity::Error, TEXT("Function pointer was null. Was %s found?"), DLL_NAME);
		FEditorDelegates::OnAssetPostImport.Broadcast(this, nullptr);
		return nullptr;
	}

	// if the sound already exists, remember the user settings
	USoundWave* ExistingSound = FindObject<USoundWave>(InParent, *Name.ToString());

	// stop playing the file, if it already exists (e.g. reimport)
	TArray<UAudioComponent*> ComponentsToRestart;
	FAudioDeviceManager* AudioDeviceManager = GEngine->GetAudioDeviceManager();
	if (AudioDeviceManager && ExistingSound)
	{
		AudioDeviceManager->StopSoundsUsingResource(ExistingSound, ComponentsToRestart);
	}

	// Read the mp3 header and make sure we have valid data
	UMP3Decoder Decoder(Warn);
	Decoder.Init(Buffer, BufferEnd);

	if (Decoder.BitsPerSample != 16)
	{
		Warn->Logf(ELogVerbosity::Error, TEXT("Unreal only supports 16bit WAVE data (%s)."), *Name.ToString());
		FEditorDelegates::OnAssetPostImport.Broadcast(this, nullptr);
		return nullptr;
	}

	if (Decoder.Channels != 1 && Decoder.Channels != 2)
	{
		Warn->Logf(ELogVerbosity::Error, TEXT("Unreal only supports 1-2 channel WAVE data (Mono/Stereo). (%s)."), *Name.ToString());
		FEditorDelegates::OnAssetPostImport.Broadcast(this, nullptr);
		return nullptr;
	}

	//on reimport, reuse settings, wipe data. otherwise create new. (UE4 WAVE import has some more checks, maybe implement, too?)
	USoundWave* Sound;
	if (ExistingSound && bMP3SoundFactoryIsReimport)
	{
		Sound = ExistingSound;
		// Release render resources and drop stale compressed data before overwriting.
		Sound->FreeResources();
		Sound->InvalidateCompressedData();
	}
	else
	{
		Sound = NewObject<USoundWave>(InParent, Name, Flags);
	}

	Sound->AssetImportData->Update(GetCurrentFilename());

	TArray<uint8> RawWavBuffer;
	// NOTE(review): 16x the compressed size looks like a heuristic reserve
	// for the decoded PCM -- confirm it covers typical MP3 ratios.
	RawWavBuffer.Reserve((BufferEnd - Buffer) * 16);

	//actual decoding
	Decoder.Decode(RawWavBuffer);

	// Copy the decoded PCM into the asset's bulk data.
	Sound->RawData.Lock(LOCK_READ_WRITE);
	void* LockedData = Sound->RawData.Realloc(RawWavBuffer.Num() * RawWavBuffer.GetTypeSize());
	FMemory::Memcpy(LockedData, RawWavBuffer.GetData(), RawWavBuffer.Num() * RawWavBuffer.GetTypeSize());
	Sound->RawData.Unlock();
	RawWavBuffer.Empty();

	// Calculate duration.
	Sound->Duration = (float)Decoder.SizeInBytes / Decoder.Samplerate / Decoder.Channels / (BITS_PER_SAMPLE / 8);
	Sound->SampleRate = Decoder.Samplerate;
	Sound->NumChannels = Decoder.Channels;
	Sound->RawPCMDataSize = Decoder.SizeInBytes;

	FEditorDelegates::OnAssetPostImport.Broadcast(this, Sound);

	if (ExistingSound)
	{
		Sound->PostEditChange();
	}

	// Restart any components that were stopped above.
	for (int32 ComponentIndex = 0; ComponentIndex < ComponentsToRestart.Num(); ++ComponentIndex)
	{
		ComponentsToRestart[ComponentIndex]->Play();
	}

	return Sound;
}
// Full QR decode pipeline over the member image `img`:
// binarize -> locate the three finder patterns -> sample the module grid
// -> decode the bit matrix and print the text. When `more` is set, each
// stage additionally draws/prints debug output and waits for a key press.
void QRcodeReader::decode() {
    // Stage 1: threshold to a black/white matrix.
    Binarizer binarizer(img);
    img = binarizer.getBlackMatrix();
    if (more) {
        imshow("original", rgbImg);
        imshow("test",img);
        waitKey(0);
        printf("**************************************************************\n");
        printf("Begin detection to find the three finder pattern centers:\n");
    }
    // Stage 2: locate the three finder-pattern centers.
    Finder finder = Finder(img);
    FinderResult fr = finder.find();
    if (more) {
        printf("\n");
        printf("Three finder pattern centers:\n");
        FinderPoint bL = fr.getBottomLeft();
        FinderPoint tL = fr.getTopLeft();
        FinderPoint tR = fr.getTopRight();
        printf("bottomLeft: (%f, %f)\n", bL.getX(), bL.getY());
        printf("topLeft: (%f, %f)\n", tL.getX(), tL.getY());
        printf("topRight: (%f, %f)\n", tR.getX(), tR.getY());
        Point2f p1 = Point2f(bL.getX(), bL.getY());
        circle(rgbImg, p1, 3, Scalar(0,255,0));
        Point2f p2 = Point2f(tL.getX(), tL.getY());
        circle(rgbImg, p2, 3, Scalar(0,255,0));
        Point2f p3 = Point2f(tR.getX(), tR.getY());
        circle(rgbImg, p3, 3, Scalar(0,255,0));
        imshow("original", rgbImg);
        waitKey(0);
    }
    // Stage 3: rectify and sample the grid into a bit matrix.
    Detector detector = Detector(img);
    DetectorResult detectorResult = detector.processFinderPatternInfo(fr);
    if (more) {
        vector<FinderPoint> patternPoints = detectorResult.getResultPoints();
        BitMatrix bits = detectorResult.getBits();
        printf("\n");
        printf("Module Size: %f\n", detectorResult.getModuleSize());
        printf("Dimension: %d\n", detectorResult.getDimension());
        // NOTE(review): index 3 is presumably the alignment pattern among the
        // result points -- confirm against Detector's point ordering.
        printf("Alignment Pattern : (%f, %f)\n", patternPoints[3].getX(), patternPoints[3].getY());
        Point2f p4 = Point2f(patternPoints[3].getX(), patternPoints[3].getY());
        circle(rgbImg, p4, 3, Scalar(0,255,0));
        imshow("original", rgbImg);
        waitKey(0);
        printf("\n");
        printf("The bit matrix:\n");
        bits.display();
        printf("\nDetection Done!\n");
        printf("**************************************************************\n");
        waitKey(0);
    }
    // Stage 4: error-correct and decode the bit matrix into text.
    Decoder decoder = Decoder(detectorResult);
    DecoderResult decoderResult = decoder.decode();
    if (more) {
        printf("Decode:\n");
        printf("version : %d\n", decoderResult.getVersion());
        printf("Error correct level : %d\n", decoderResult.getEcLevel());
        vector<char> resultBytes = decoderResult.getResultBytes();
        printf("Data bytes: ");
        for (int i = 0; i < resultBytes.size(); ++i) {
            printf("%d ",resultBytes[i]);
        }
        printf("\n");
        string result = decoderResult.getResultText();
        printf("%s\n", result.c_str());
        waitKey(0);
    } else {
        string result = decoderResult.getResultText();
        printf("%s\n", result.c_str());
    }
}