bool ICard::readFile(unsigned char sfid, size_t chunk_size, std::vector<unsigned char>& result)
{
    // Read the file identified by sfid in chunks of chunk_size bytes.
    ReadBinary read = ReadBinary(0, sfid);
    read.setNe(chunk_size);
    RAPDU response = transceive(read);

    // As long as the card answers with a full chunk, there may be more data:
    // keep reading at the current offset (result.size()).
    while (response.isOK() && response.getData().size() == chunk_size)
    {
        result.insert(result.end(), response.getData().begin(), response.getData().end());
        read = ReadBinary(result.size());
        read.setNe(chunk_size);
        response = transceive(read);
    }

    // Append the final, short chunk (possibly empty).
    result.insert(result.end(), response.getData().begin(), response.getData().end());

    // If nothing was read at all, report the status of the last command;
    // otherwise the data collected so far counts as success.
    if (result.empty())
    {
        return response.isOK();
    }
    return true;
}
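A minimal standalone sketch of the same chunk-until-short-read pattern, written against std::istream instead of the card channel; readAll and the 4096-byte chunk size are illustrative assumptions, not part of the ICard API:

#include <cstddef>
#include <istream>
#include <vector>

std::vector<unsigned char> readAll(std::istream& in, std::size_t chunkSize = 4096)
{
    std::vector<unsigned char> result;
    std::vector<char> chunk(chunkSize);
    for (;;)
    {
        in.read(chunk.data(), static_cast<std::streamsize>(chunk.size()));
        std::size_t got = static_cast<std::size_t>(in.gcount());
        result.insert(result.end(), chunk.begin(), chunk.begin() + got);
        // A short read plays the role of the short APDU response above:
        // it signals that the end of the data has been reached.
        if (got < chunkSize)
            break;
    }
    return result;
}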
bool LogSegment::ReadMessage(offset_t& offset, size_t& pos, uint8_t*& key, uint32_t& keyLen,
                             uint8_t*& value, uint32_t& valueLen) const
{
    // Refuse to read past the last byte that has been written.
    if (pos >= m_writePos)
    {
        return false;
    }
    // Record layout: offset, total message length, then length-prefixed
    // key and value. Each Read* call advances pos.
    ReadData(pos, offset);
    uint32_t messageLen;
    ReadData(pos, messageLen); // read to advance pos; not otherwise used here
    ReadBinary(pos, key, keyLen);
    ReadBinary(pos, value, valueLen);
    return true;
}
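The ReadData/ReadBinary helpers above are LogSegment members and are not shown. The sketch below reconstructs the length-prefixed read over a plain byte buffer; the name readBinaryField and the std::vector backing are assumptions, since the real segment wraps its own storage:

#include <cstdint>
#include <cstring>
#include <vector>

bool readBinaryField(const std::vector<uint8_t>& buf, size_t& pos,
                     const uint8_t*& data, uint32_t& len)
{
    if (pos + sizeof(uint32_t) > buf.size())
        return false;                    // no room left for the length prefix
    std::memcpy(&len, buf.data() + pos, sizeof(uint32_t));
    pos += sizeof(uint32_t);
    if (pos + len > buf.size())
        return false;                    // length points past the end
    data = buf.data() + pos;             // zero-copy: point into the buffer
    pos += len;
    return true;
}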
int main(int argc, char** argv)
{
    WBXML_INFO buffer;
    FILE* file;

    if (argc < 2)
    {
        file = stdin;
    }
    else
    {
        file = fopen(argv[1], "r");
        if (!file)
        {
            ParseError(ERR_FILE_NOT_FOUND);
        }
    }

    Init(&buffer);
    ReadBinary(&buffer, file);
    Read_start(&buffer);
    DumpNodes(&buffer);
    Free(&buffer);

    return 0;
}
bool ICard::readFile(std::vector<unsigned char>& result)
{
    // Single-shot variant: request the maximum extended-length response
    // instead of looping over fixed-size chunks.
    ReadBinary read = ReadBinary();
    read.setNe(CAPDU::DATA_EXTENDED_MAX);
    RAPDU response = transceive(read);
    result = response.getData();
    return response.isOK();
}
CByteArray CPkiCard::ReadUncachedFile(const std::string & csPath,
    unsigned long ulOffset, unsigned long ulMaxLen)
{
    CByteArray oData(ulMaxLen);

    CAutoLock autolock(this);

    tFileInfo fileInfo = SelectFile(csPath, true);

    // Loop until we've read ulMaxLen bytes or until EOF (End Of File)
    bool bEOF = false;
    for (unsigned long i = 0; i < ulMaxLen && !bEOF; i += MAX_APDU_READ_LEN)
    {
        unsigned long ulLen = ulMaxLen - i <= MAX_APDU_READ_LEN ?
            ulMaxLen - i : MAX_APDU_READ_LEN;

        CByteArray oResp = ReadBinary(ulOffset + i, ulLen);

        unsigned long ulSW12 = getSW12(oResp);

        // If the file is a multiple of the block read size, you will get
        // an SW12 = 6B00 (at least with BE eID), but that's OK then.
        if (ulSW12 == 0x9000 || (i != 0 && ulSW12 == 0x6B00))
            oData.Append(oResp.GetBytes(), oResp.Size() - 2);
        else if (ulSW12 == 0x6982)
        {
            throw CNotAuthenticatedException(
                EIDMW_ERR_NOT_AUTHENTICATED, fileInfo.lReadPINRef);
        }
        else if (ulSW12 == 0x6B00)
            throw CMWEXCEPTION(EIDMW_ERR_PARAM_RANGE);
        else if (ulSW12 == 0x6D00)
            throw CMWEXCEPTION(EIDMW_ERR_NOT_ACTIVATED);
        else
            throw CMWEXCEPTION(m_poContext->m_oPCSC.SW12ToErr(ulSW12));

        // If the driver/reader itself did the 6CXX handling,
        // we assume we're at the EOF
        if (oResp.Size() < MAX_APDU_READ_LEN)
            bEOF = true;
    }

    MWLOG(LEV_INFO, MOD_CAL, L" Read file %ls (%d bytes) from card",
        utilStringWiden(csPath).c_str(), oData.Size());

    return oData;
}
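For reference, a status word such as 0x9000 or 0x6B00 is simply the last two bytes of the APDU response (SW1, SW2) combined big-endian, which is also why the code above appends oResp.Size() - 2 data bytes. A sketch of a getSW12-style helper over a std::vector (the real code uses CByteArray, so the container is an assumption):

#include <cstdint>
#include <vector>

uint16_t statusWord(const std::vector<uint8_t>& resp)
{
    if (resp.size() < 2)
        return 0;                                  // malformed response
    return static_cast<uint16_t>(resp[resp.size() - 2]) << 8
         | resp[resp.size() - 1];                  // e.g. 0x9000 = success
}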
bool C4Playback::NextSequentialChunk()
{
    StdBuf BinaryBuf;
    size_t iRealSize;
    BinaryBuf.New(4096);
    // load data until a chunk could be filled
    for (;;)
    {
        iRealSize = 0;
        playbackFile.Read(BinaryBuf.getMData(), 4096, &iRealSize);
        if (!iRealSize) return false;
        BinaryBuf.SetSize(iRealSize);
        if (!ReadBinary(BinaryBuf)) return false;
        // okay, at least one chunk has been read!
        if (chunks.size())
        {
            currChunk = chunks.begin();
            return true;
        }
    }
    // playback file reading failed - looks like we're done
    return false;
}
void Labeler::loadModelFile(const string& inputModelFile)
{
    std::cout << "Start loading model from file: " << inputModelFile << std::endl;
    LStream inf(inputModelFile, "rb");
    m_options.loadModel(inf);
    m_options.showOptions();

    m_wordAlphabet.loadModel(inf);
    m_charAlphabet.loadModel(inf);
    m_labelAlphabet.loadModel(inf);
    m_featAlphabet.loadModel(inf);
    m_classifier.loadModel(inf);

    // The tag alphabets are stored as a count followed by each alphabet.
    int m_tagAlphabets_size;
    ReadBinary(inf, m_tagAlphabets_size);
    m_tagAlphabets.resize(m_tagAlphabets_size);
    for (int idx = 0; idx < m_tagAlphabets_size; idx++)
    {
        m_tagAlphabets[idx].loadModel(inf);
    }

    ReadString(inf, nullkey);
    ReadString(inf, unknownkey);
    ReadString(inf, seperateKey);
    std::cout << "Model has been loaded from file: " << inputModelFile << std::endl;
}
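ReadBinary and ReadString here come from the surrounding serialization utilities and are not shown. A plausible sketch under the usual convention (raw bytes for trivially copyable values, a length prefix for strings), written against std::istream rather than LStream; the signatures are assumptions:

#include <cstdint>
#include <istream>
#include <string>
#include <type_traits>

template <typename T>
bool ReadBinary(std::istream& in, T& value)
{
    static_assert(std::is_trivially_copyable<T>::value,
                  "raw-byte deserialization only works for trivially copyable types");
    return static_cast<bool>(in.read(reinterpret_cast<char*>(&value), sizeof(T)));
}

bool ReadString(std::istream& in, std::string& out)
{
    int32_t len = 0;
    if (!ReadBinary(in, len) || len < 0)
        return false;
    out.resize(static_cast<size_t>(len));
    return static_cast<bool>(in.read(&out[0], static_cast<std::streamsize>(len)));
}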
void BSONInputStreamGZ::SkipBytes(UInt32 count) throw(BSONReadException)
{
    // Skipping is delegated to ReadBinary with a NULL destination buffer:
    // count bytes are consumed from the stream and discarded.
    ReadBinary(NULL, count);
}
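A gzip-backed stream has no cheap forward seek, so skipping must consume input. A generic sketch of the same idea over std::istream, with a scratch buffer standing in for the NULL-destination read (names and buffer size are illustrative):

#include <cstddef>
#include <istream>

void skipBytes(std::istream& in, std::size_t count)
{
    char scratch[256];
    // Read and discard until count bytes are consumed or the stream fails.
    while (count > 0 && in)
    {
        std::size_t n = count < sizeof(scratch) ? count : sizeof(scratch);
        in.read(scratch, static_cast<std::streamsize>(n));
        count -= static_cast<std::size_t>(in.gcount());
    }
}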
/*----------------------------------------------------------------------------------------------
    Load data into the cache from the record set defined by hstmt, according to the specs
    in prgocs/cocs. Columns with m_icolID = 0 give properties of hvoBase.
    Load properties of at most crowMax objects; this may only be used if there is no vector
    property being loaded, since we could not be sure of having a complete record of the
    value of a vector without loading the next row. If crowMax is zero, load everything.
    Note: call from inside try/catch block; may throw exceptions.
    Note that prgocs[i] describes the column which ODBC indexes as [i+1].
----------------------------------------------------------------------------------------------*/
void VwRsOdbcDa::Load(SQLHSTMT hstmt, OdbcColSpec * prgocs, int cocs,
    HVO hvoBase, int crowMax)
{
    AssertArray(prgocs, cocs);
    Assert((uint)cocs <= (uint)200); // limit because of size of rghvoBaseIds
    Assert(crowMax >= 0);

    ITsStrFactoryPtr qtsf;
    qtsf.CreateInstance(CLSID_TsStrFactory);
    ITsPropsFactoryPtr qtpf;
    qtpf.CreateInstance(CLSID_TsPropsFactory);

    // Block of variables for binary fields
    Vector<byte> vbData;            // used to buffer data from binary fields
    const int kcbMaxData = 1000;    // amount of binary data to read in one go
    byte rgbData[kcbMaxData];       // buffer for short binary data fields
    long cbData;                    // how many bytes in prgbData hold valid data
    byte * prgbData;                // points to rgbData or vbData.Begin(), as appropriate

    // Similar block for Unicode text
    Vector<wchar> vchData;
    const int kcchMaxData = 1000;
    wchar rgchData[kcchMaxData];
    long cchData;
    wchar * prgchData;

    Vector<HVO> vhvo;       // accumulate objects for sequence property
    int nrows = 0;
    if (crowMax == 0)
        crowMax = INT_MAX;
    HVO rghvoBaseIds[200];
    int icolVec = -1;       // index of (one and only) column of type koctObjVec
    HVO hvoVecBase;         // object that is base of vector property

    while (CheckSqlRc(SQLFetch(hstmt)) != SQL_NO_DATA)
    {
        // We have a record.
        for (int icol = 0; icol < cocs; icol++)
        {
            int nVal;
            HVO hvoVal;
            ITsStringPtr qtssVal; // TODO JohnT: fill this in...
            HVO hvoCurBase; // object whose property we will read.
            if (prgocs[icol].m_icolID == 0)
                hvoCurBase = hvoBase;
            else
            {
                // Must refer to a previous column; use <= because m_icolID is 1-based, so
                // if equal to i, it refers to the immediately previous column.
                Assert(prgocs[icol].m_icolID <= icol);
                hvoCurBase = rghvoBaseIds[prgocs[icol].m_icolID - 1];
            }
            switch (prgocs[icol].m_oct)
            {
            default:
                Assert(false);
                ThrowHr(WarnHr(E_UNEXPECTED));
            case koctInt:
                CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG,
                    &nVal, 4, NULL));
                CacheIntProp(hvoCurBase, prgocs[icol].m_tag, nVal);
                break;
            case koctUnicode:
                ReadUnicode(hstmt, icol + 1, rgchData, kcchMaxData, vchData,
                    prgchData, cchData);
                CacheUnicodeProp(hvoCurBase, prgocs[icol].m_tag, prgchData, cchData);
                break;
            case koctString:
            case koctMlsAlt:
            case koctMltAlt:
                // Next column must give format; both are for the same property
                ReadUnicode(hstmt, icol + 1, rgchData, kcchMaxData, vchData,
                    prgchData, cchData);
                if (koctMltAlt != prgocs[icol].m_oct)
                {
                    Assert(icol < cocs - 1 && prgocs[icol + 1].m_oct == koctFmt);
                    Assert(prgocs[icol].m_tag == prgocs[icol + 1].m_tag);
                    // Leave the data in prgchData and cchData, to be processed next
                    // iteration when we read the format.
                    break;
                }
                // An MLS alt without a Fmt column: use the specified writing system both
                // for the string formatting and to indicate the alternative.
                CheckHr(qtsf->MakeStringRgch(prgchData, cchData, prgocs[icol].m_ws, &qtssVal));
                CacheStringAlt(hvoCurBase, prgocs[icol].m_tag, prgocs[icol].m_ws, qtssVal);
                break;
            case koctFmt:
                // Previous column must be string or multistring; we have already checked
                // that they have the same tag.
                Assert(icol > 0 && (prgocs[icol - 1].m_oct == koctString ||
                    prgocs[icol - 1].m_oct == koctMlsAlt));
                ReadBinary(hstmt, icol + 1, rgbData, kcbMaxData, vbData, prgbData, cbData);
                int cbDataInt;
                cbDataInt = cbData;
                int cchDataInt;
                cchDataInt = cchData;
                if (cchDataInt == 0 && cbDataInt == 0)
                    CheckHr(qtsf->MakeStringRgch(NULL, 0, prgocs[icol - 1].m_ws, &qtssVal));
                else
                    CheckHr(qtsf->DeserializeStringRgch(prgchData, &cchDataInt, prgbData,
                        &cbDataInt, &qtssVal));
                if (prgocs[icol - 1].m_oct == koctString)
                {
                    CacheStringProp(hvoCurBase, prgocs[icol].m_tag, qtssVal);
                }
                else
                {
                    CacheStringAlt(hvoCurBase, prgocs[icol].m_tag,
                        prgocs[icol - 1].m_ws, qtssVal);
                }
                break;
            case koctObj:
            case koctBaseId:
                long nIndicator;
                CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG,
                    &hvoVal, 4, &nIndicator));
                // Treat null as zero.
                if (nIndicator == SQL_NULL_DATA)
                    hvoVal = 0;
                if (prgocs[icol].m_oct == koctObj)
                    CacheObjProp(hvoCurBase, prgocs[icol].m_tag, hvoVal);
                rghvoBaseIds[icol] = hvoVal;
                break;
            case koctObjVec:
                CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG,
                    &hvoVal, 4, NULL));
                rghvoBaseIds[icol] = hvoVal;
                // See if there has been a change in the base column; if so, record the
                // value and start a new one.
                if (icolVec < 0)
                {
                    // First iteration, ignore previous object
                    icolVec = icol;
                    hvoVecBase = hvoCurBase;
                }
                else
                {
                    // Only one vector column allowed!
                    Assert(icolVec == icol);
                    if (hvoVecBase != hvoCurBase)
                    {
                        // Started a new vector! Record the old one
                        CacheVecProp(hvoVecBase, prgocs[icolVec].m_tag, vhvo.Begin(),
                            vhvo.Size());
                        // clear the list out and note the new base object
                        vhvo.Clear();
                        hvoVecBase = hvoCurBase;
                    }
                }
                vhvo.Push(hvoVal);
                break;
            case koctTtp:
                ReadBinary(hstmt, icol + 1, rgbData, kcbMaxData, vbData, prgbData, cbData);
                if (cbData > 0) // otherwise field is null, cache nothing
                {
                    cbDataInt = cbData;
                    ITsTextPropsPtr qttp;
                    qtpf->DeserializePropsRgb(prgbData, &cbDataInt, &qttp);
                    CacheUnknown(hvoCurBase, prgocs[icol].m_tag, qttp);
                }
                break;
            }
        }
        // Stop if we have processed the requested number of rows.
        nrows++;
        if (nrows >= crowMax)
            break;
    }
    // If we are processing a vector, we need to fill in the last occurrence
    if (icolVec >= 0)
    {
        CacheVecProp(hvoVecBase, prgocs[icolVec].m_tag, vhvo.Begin(), vhvo.Size());
    }
}
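The ReadBinary(hstmt, ...) helper used in the koctFmt and koctTtp cases above is not shown. The sketch below reconstructs a plausible implementation from the documented SQLGetData contract: when the target buffer is too small, the call returns SQL_SUCCESS_WITH_INFO and the indicator carries the total byte count still available. The two-step retrieval and all names are assumptions rather than the original helper, and the SQL_NO_TOTAL case is omitted for brevity:

#include <cstring>
#include <vector>
#include <sql.h>
#include <sqlext.h>

void ReadBinarySketch(SQLHSTMT hstmt, SQLUSMALLINT icol,
                      SQLCHAR* rgbData, SQLLEN cbMax,
                      std::vector<SQLCHAR>& vbData,
                      SQLCHAR*& prgbData, SQLLEN& cbData)
{
    SQLLEN cbIndicator = 0;
    SQLRETURN rc = SQLGetData(hstmt, icol, SQL_C_BINARY, rgbData, cbMax, &cbIndicator);
    if (cbIndicator == SQL_NULL_DATA)
    {
        // Null field: report zero bytes.
        prgbData = rgbData;
        cbData = 0;
        return;
    }
    if (rc == SQL_SUCCESS)
    {
        // Everything fit in the caller's small stack buffer.
        prgbData = rgbData;
        cbData = cbIndicator;
        return;
    }
    // SQL_SUCCESS_WITH_INFO: rgbData holds the first cbMax bytes and the
    // indicator reports the full size; fetch the remainder into the vector.
    // (A production version would also handle SQL_NO_TOTAL and error codes.)
    vbData.resize(static_cast<size_t>(cbIndicator));
    std::memcpy(vbData.data(), rgbData, static_cast<size_t>(cbMax));
    SQLGetData(hstmt, icol, SQL_C_BINARY, vbData.data() + cbMax,
               cbIndicator - cbMax, &cbIndicator);
    prgbData = vbData.data();
    cbData = static_cast<SQLLEN>(vbData.size());
}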
BOOL C4Playback::Open(C4Group &rGrp)
{
    // clean up
    Clear();
    fLoadSequential = false;
    iLastSequentialFrame = 0;
    bool fStrip = false;
    // get text record file
    StdStrBuf TextBuf;
    if (rGrp.LoadEntryString(C4CFN_CtrlRecText, TextBuf))
    {
        if (!ReadText(TextBuf))
            return FALSE;
    }
    else
    {
        // open group? Then do some sequential reading for large files
        // Can't do this when a dump is forced, because the dump needs all data
        // Also can't do this when stripping is desired
        if (!rGrp.IsPacked())
            if (!Game.RecordDumpFile.getLength())
                if (!fStrip)
                    fLoadSequential = true;
        // get record file
        if (fLoadSequential)
        {
            if (!rGrp.FindEntry(C4CFN_CtrlRec))
                return FALSE;
            if (!playbackFile.Open(FormatString("%s%c%s",
                rGrp.GetFullName().getData(),
                (char)DirectorySeparator,
                (const char *)C4CFN_CtrlRec).getData()))
                return FALSE;
            // force the first chunk to be read; will call ReadBinary
            currChunk = chunks.end();
            if (!NextSequentialChunk())
            {
                // empty replay??!
                LogFatal("Record: Binary read error.");
                return FALSE;
            }
        }
        else
        {
            // non-sequential reading: Just read as a whole
            StdBuf BinaryBuf;
            if (rGrp.LoadEntry(C4CFN_CtrlRec, BinaryBuf))
            {
                if (!ReadBinary(BinaryBuf))
                    return FALSE;
            }
            else
            {
                // file too large? Try sequential loading and parsing
                /* size_t iSize;
                if (rGrp.AccessEntry(C4CFN_CtrlRec, &iSize))
                {
                    CStdFile fOut;
                    fOut.Create(Game.RecordDumpFile.getData());
                    fLoadSequential = true;
                    const size_t iChunkSize = 1024 * 1024 * 16; // 16M
                    while (iSize)
                    {
                        size_t iLoadSize = Min<size_t>(iChunkSize, iSize);
                        BinaryBuf.SetSize(iLoadSize);
                        if (!rGrp.Read(BinaryBuf.getMData(), iLoadSize))
                        {
                            LogFatal("Record: Binary load error!");
                            return FALSE;
                        }
                        iSize -= iLoadSize;
                        if (!ReadBinary(BinaryBuf)) return FALSE;
                        LogF("%d binary remaining", iSize);
                        currChunk = chunks.begin();
                        if (fStrip) Strip();
                        StdStrBuf s(ReWriteText());
                        fOut.WriteString(s.getData());
                        LogF("Wrote %d text bytes (%d binary remaining)",
                            s.getLength(), iSize);
                        chunks.clear();
                    }
                    fOut.Close();
                    fLoadSequential = false;
                }
                else */
                {
                    // no control data?
                    LogFatal("Record: No control data found!");
                    return FALSE;
                }
            }
        }
    }
    // rewrite record
    if (fStrip) Strip();
    if (Game.RecordDumpFile.getLength())
    {
        if (SEqualNoCase(GetExtension(Game.RecordDumpFile.getData()), "txt"))
            ReWriteText().SaveToFile(Game.RecordDumpFile.getData());
        else
            ReWriteBinary().SaveToFile(Game.RecordDumpFile.getData());
    }
    // reset status
    currChunk = chunks.begin();
    Finished = false;
    // external debugrec file
#if defined(DEBUGREC_EXTFILE) && defined(DEBUGREC)
#ifdef DEBUGREC_EXTFILE_WRITE
    if (!DbgRecFile.Create(DEBUGREC_EXTFILE))
    {
        LogFatal("DbgRec: Creation of external file \"" DEBUGREC_EXTFILE "\" failed!");
        return FALSE;
    }
    else
        Log("DbgRec: Writing to \"" DEBUGREC_EXTFILE "\"...");
#else
    if (!DbgRecFile.Open(DEBUGREC_EXTFILE))
    {
        LogFatal("DbgRec: Opening of external file \"" DEBUGREC_EXTFILE "\" failed!");
        return FALSE;
    }
    else
        Log("DbgRec: Checking against \"" DEBUGREC_EXTFILE "\"...");
#endif
#endif
    // ok
    return TRUE;
}