//读取备份文件 void LogReader::readBackupFile() throw(ReadException) { cout << "读取备份文件开始..." << endl; //打开备份文件 ifstream ifs(m_backupFile.c_str(),ios::binary); if(!ifs) { throw ReadException("备份文件打开失败"); } //读取指定的数据内容 struct stat s; //获取文件的状态信息出错 if(-1 == stat(m_backupFile.c_str(),&s)) { throw ReadException("无法获取文件的大小"); } int num = s.st_size/372; int count = 0; for(int i = 0; i < num; i++) { //定义一个临时结构体 LogRec tempLog; //每次文件头开始按照372整数倍的位置开始读取 ifs.seekg(372*i,ios::beg); ifs.read(tempLog.logname,32); ifs.seekg(36,ios::cur); ifs.read((char*)&tempLog.pid,4); //从网络字节序转换为本机字节序 tempLog.pid = ntohl(tempLog.pid); short type; ifs.read((char*)&type,2); type = ntohs(type); ifs.seekg(6,ios::cur); ifs.read((char*)&tempLog.logtime,4); tempLog.logtime = ntohl(tempLog.logtime); ifs.seekg(30,ios::cur); ifs.read(tempLog.logip,257); //把读取到的记录分类保存 //把以.开头的非法用户过滤掉 if(tempLog.logname[0] != '.') { if(7 == type) { m_logins.push_back(tempLog); } else if(8 == type) { m_logouts.push_back(tempLog); } } } ifs.close(); cout << "登入集合的大小是:" << m_logins.size() << endl; cout << "登出集合的大小是:" << m_logouts.size() << endl; cout << "读取备份文件结束" << endl; }
// Parse a name group: one or more identifiers separated by '&', ','
// or '|' connectors and closed by a right bracket. Returns an
// "elements" node holding one child element per name found.
// Throws ReadException on a missing name or unclosed group.
TElementPtr DeclarationParser :: parseNameGroup()
{
    TElementPtr group = mSchema->createElement("elements");

    parseTsStar();
    if (mToken != IDENTIFIER_SYM) {
        throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(),
                            "Name expected", GENERIC, true);
    }
    group->appendChild(mSchema->createElement(mScanner->getTokenText()));
    mToken = mScanner->nextToken();

    for (;;) {
        parseTsStar();
        // A connector token promises another name; anything else ends
        // the group body.
        if (mToken != AND_SYM && mToken != COMMA_SYM && mToken != PIPE_SYM) {
            break;
        }
        mToken = mScanner->nextToken();
        parseTsStar();
        if (mToken != IDENTIFIER_SYM) {
            throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(),
                                "Name expected", GENERIC, true);
        }
        group->appendChild(mSchema->createElement(mScanner->getTokenText()));
        mToken = mScanner->nextToken();
    }

    parseTsStar();
    if (mToken != RIGHT_BRACKET_SYM) {
        throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(),
                            "Name group not closed correctly", GENERIC, true);
    }
    mToken = mScanner->nextToken();
    return group;
}
void DeclarationParser :: parseNameTokenGroup() { parseTsStar(); if (mToken == IDENTIFIER_SYM || mToken == NUMBER_SYM) { mToken = mScanner->nextToken(); bool inGroup = true; while (inGroup) { parseTsStar(); if (mToken == AND_SYM || mToken == COMMA_SYM || mToken == PIPE_SYM) { mToken = mScanner->nextToken(); } else { inGroup = false; continue; } parseTsStar(); if (mToken == IDENTIFIER_SYM || mToken == NUMBER_SYM) { mToken = mScanner->nextToken(); } else { throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(), "Name expected", GENERIC, true); } } } else { throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(), "Name expected", GENERIC, true); } parseTsStar(); if (mToken != RIGHT_BRACKET_SYM) { throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(), "Name group not closed correctly", GENERIC, true); } mToken = mScanner->nextToken(); }
// Parse an external identifier for an entity declaration.
// A PUBLIC keyword requires a quoted public id, which is stored in
// the entity's "text" attribute. An optional system id may follow
// after whitespace; it is consumed but not stored.
void DeclarationParser :: parseExternalId(string aToken, TElementPtr aEntity)
{
    if (aToken == kPUBLIC) {
        parsePsPlus();
        if (mToken != TEXT_SYM) {
            throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(),
                                "Public id expected", GENERIC, true);
        }
        aEntity->setAttribute("text", mScanner->getTokenText());
        mToken = mScanner->nextToken();
    }
    if (mToken == SPACE_SYM) {
        parsePsPlus();
        // System id is optional, so a missing TEXT_SYM is not an error.
        if (mToken == TEXT_SYM) {
            mToken = mScanner->nextToken();
        }
    }
}
// Read a star catalog from an ASCII file into the parallel member
// vectors, replacing any previously loaded data.
// NOTE(review): this excerpt is truncated — the non-whitespace
// delimiter branch ("else") continues beyond what is visible here.
void StarCatalog::readAscii(std::string file, std::string delim)
{
    std::ifstream fin(file.c_str());
    if (!fin) {
        throw ReadException("Error opening stars file"+file);
    }
    // Discard whatever catalog is currently held.
    _id.clear(); _pos.clear(); _sky.clear(); _noise.clear();
    _flags.clear(); _mag.clear(); _sg.clear(); _objsize.clear();
    _is_star.clear();
    if (delim == " ") {
        // Whitespace-delimited fast path: rely on operator>> splitting.
        ConvertibleString flag;
        long id1,star;
        double x,y,sky1,n,s,m,sg;
        // One row per object: id x y sky noise flags mag sg objsize star
        while ( fin >> id1 >> x >> y >> sky1 >> n >> flag
                    >> m >> sg >> s >> star ) {
            _id.push_back(id1);
            _pos.push_back(Position(x,y));
            _sky.push_back(sky1);
            _noise.push_back(n);
            _flags.push_back(flag);
            _mag.push_back(m);
            _sg.push_back(sg);
            _objsize.push_back(s);
            _is_star.push_back(star);
        }
    } else {
// Read a PSF catalog from an ASCII file into the parallel member
// vectors, replacing any previously loaded data. Each row carries the
// object's scalars plus a shapelet PSF (order, sigma, coefficients).
// NOTE(review): this excerpt is truncated — the non-whitespace
// delimiter branch ("else") continues beyond what is visible here.
void PsfCatalog::readAscii(std::string file, std::string delim)
{
    std::ifstream fin(file.c_str());
    if (!fin) {
        throw ReadException("Error opening psf file"+file);
    }
    // Discard whatever catalog is currently held.
    _id.clear(); _pos.clear(); _sky.clear(); _noise.clear();
    _flags.clear(); _nu.clear(); _psf.clear();
    if (delim == " ") {
        // Whitespace-delimited fast path: rely on operator>> splitting.
        ConvertibleString flag;
        long id1,border;
        double x,y,sky1,noise1,nu1,bsigma;
        while ( fin >> id1 >> x >> y >> sky1 >> noise1 >> flag
                    >> nu1 >> border >> bsigma) {
            _id.push_back(id1);
            _pos.push_back(Position(x,y));
            _sky.push_back(sky1);
            _noise.push_back(noise1);
            _flags.push_back(flag);
            _nu.push_back(nu1);
            // Size the shapelet vector from (order, sigma), then read
            // its coefficients one by one.
            _psf.push_back(BVec(border,bsigma));
            const int ncoeffs = _psf.back().size();
            for(int j=0;j<ncoeffs;++j) fin >> _psf.back()(j);
        }
    } else {
// readFileLists reads the srclist file specified in params // and reads the names of the images and fitpsf void MultiShearCatalog::readFileLists() { std::string file = _params.get("coadd_srclist"); if (!DoesFileExist(file)) { throw FileNotFoundException(file); } try { dbg<<"Opening coadd srclist\n"; std::ifstream flist(file.c_str(), std::ios::in); if (!flist) { throw ReadException("Unable to open source list file " + file); } _image_file_list.clear(); _shear_file_list.clear(); _fitpsf_file_list.clear(); _skymap_file_list.clear(); std::string image_filename; std::string shear_filename; std::string fitpsf_filename; std::string skymap_filename; bool isSkyMapIn_list = _params.read("multishear_skymap_in_list",false); while (flist >> image_filename >> shear_filename >> fitpsf_filename) { dbg<<"Files are :\n"<<image_filename<<std::endl; dbg<<shear_filename<<std::endl; dbg<<fitpsf_filename<<std::endl; if (isSkyMapIn_list) { flist >> skymap_filename; if (!flist) { throw ReadException( "Unable to read skymap_filename in list file " + file); } addImage(image_filename,fitpsf_filename, shear_filename,skymap_filename); } else { addImage(image_filename,fitpsf_filename,shear_filename); } } if (isSkyMapIn_list) { Assert(_skymap_file_list.size() == _image_file_list.size()); } } catch (std::exception& e) {
// Require at least one parameter separator, then consume any further
// ones (the ps+ production). Throws ReadException when no separator
// is present at the current position.
void BaseParser :: parsePsPlus()
{
    bool separatorSeen = parsePs();
    if (!separatorSeen) {
        throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(),
                            "Space expected", GENERIC, true);
    }
    mToken = mScanner->nextToken();
    parsePsStar();
}
//给外界提供的公开接口函数 list<MLogRec>& LogReader::readLog() throw(ReadException) { cout << "读取日志记录开始..." << endl; try { //备份日志文件 backup(); //读取未匹配的记录 readLoginsFile(); //读取备份文件 readBackupFile(); //匹配 match(); //保存未匹配的记录 saveLoginsFile(); } catch(BackupException& ex) { throw BackupException("备份错误"); } catch(ReadException& ex) { throw ReadException("读取错误"); } catch(SaveException& ex) { throw SaveException("存储错误"); } //表示可以捕获任何异常 catch(...) { throw ReadException("未知错误"); } cout << "读取日志记录结束" << endl; return m_logs; }
//读取未匹配的登入记录 void LogReader::readLoginsFile() throw(ReadException) { cout << "读取未匹配的登入记录开始..." << endl; ifstream ifs(m_loginsFile.c_str()); if(ifs) { LogRec log; //每次读取一个结构体大小,然后放入集合中 while(ifs.read((char*)&log,sizeof(log))) { m_logins.push_back(log); } if(!ifs.eof()) { throw ReadException(); } } ifs.close(); cout << "读取未匹配的登入记录结束" << endl; }
// Extract the next string from the content, dispatching on the current
// character: '>' starts text, '<' a tag, '=' an attribute value, and
// anything else an attribute name. Records the kind in mStringType.
// Throws ReadException when the content is exhausted with nothing read.
string HTMLParser :: getString( bool aConserveSpaces )
{
    string result;
    char current = mContent[mPos];
    if ( current == '>' ) {
        cout << mLineNr << " " << mCharNr << ": Getting text...\n";
        result = getText( aConserveSpaces );
        mStringType = TEXT;
    }
    else if ( current == '<' ) {
        cout << mLineNr << " " << mCharNr << ": Getting tag...\n";
        result = getTag();
        mStringType = TAG;
    }
    else if ( current == '=' ) {
        cout << mLineNr << " " << mCharNr << ": Getting attribute value: ";
        result = getAttrValue();
        cout << result << endl;
        mStringType = ATTRVALUE;
    }
    else {
        cout << mLineNr << " " << mCharNr << ": Getting attribute: ";
        result = getAttribute();
        cout << result << endl;
        mStringType = ATTR;
    }
    // End of content with an empty result means the document ran out.
    if ( mPos == mContent.size() && result == "" ) {
        throw ReadException();
    }
    return result;
}
// Parse mContent into aDocument. Only doctype and html tags are
// honoured at the top level; anything else is reported and skipped.
// A truncated document is not fatal: the partially built tree is kept.
void HTMLParser :: startParsing( TDocumentShared aDocument )
{
    mDocument = aDocument;
    bool insideDoc = true;
    try {
        if ( mContent.size() == 0 ) {
            throw ReadException();
        }
        while ( insideDoc ) {
            getTag();
            if ( isDocTypeTag() ) {
                doctypeTag();
                continue;
            }
            if ( isHtmlTag() ) {
                htmlTag();
                // Last tag, quit the loop
                insideDoc = false;
                continue;
            }
            cout << "startParsing: Unexpected tag found: " << mTag << ". Skipping...\n";
            skipTag();
        }
    }
    // Catch by const reference (was by value, which needlessly copies
    // and can slice derived exception types).
    catch ( const ReadException& ) {
        cout << "Unexpected end of file..\n";
        cout << "Returning partial tree\n";
    }
}
void HTMLParser :: nextChar() { if ( mPos != mContent.size() ) { if ( mContent[mPos] != '\n' ) { if ( mContent[mPos] != '\t' ) { mCharNr++; } else { // I'm counting 4 characters for a tab mCharNr += 4; } } else { mLineNr++; mCharNr = 1; } mPos++; } else { throw ReadException(); } }
// Read a shear catalog from an ASCII file into the parallel member
// vectors, replacing any previously loaded data. Each row carries the
// object's scalars, sky position, shear, covariance and shapelet shape.
// NOTE(review): this excerpt is truncated — the non-whitespace
// delimiter branch ("else") continues beyond what is visible here.
void ShearCatalog::readAscii(std::string file, std::string delim)
{
    std::ifstream fin(file.c_str());
    if (!fin) {
        throw ReadException("Error opening stars file"+file);
    }
    // Shapelet order used to size every shape vector.
    int full_order = _params.get("shear_gal_order");
    // Discard whatever catalog is currently held.
    _id.clear(); _pos.clear(); _sky.clear(); _noise.clear();
    _flags.clear(); _skypos.clear(); _shear.clear(); _nu.clear();
    _cov.clear(); _meas_galorder.clear(); _shape.clear();
    if (delim == " ") {
        // Whitespace-delimited fast path: rely on operator>> splitting.
        ConvertibleString flag;
        long id1,border;
        double x,y,sky1,noise1,ra,dec,s1,s2,nu1,c00,c01,c11,bsigma;
        while ( fin >> id1 >> x >> y >> sky1 >> noise1 >> flag
                    >> ra >> dec >> s1 >> s2 >> nu1
                    >> c00 >> c01 >> c11 >> border >> bsigma ) {
            _id.push_back(id1);
            _pos.push_back(Position(x,y));
            _sky.push_back(sky1);
            _noise.push_back(noise1);
            _flags.push_back(flag);
            // ra/dec are stored in arcsec (file values appear to be in
            // degrees, hence *3600).
            _skypos.push_back(Position(ra*3600.,dec*3600.));
            _shear.push_back(std::complex<double>(s1,s2));
            _nu.push_back(nu1);
            // Symmetric 2x2 covariance: c01 fills both off-diagonals.
            _cov.push_back(DSmallMatrix22());
            _cov.back() << c00, c01, c01, c11;
            _meas_galorder.push_back(border);
            _shape.push_back(BVec(full_order,bsigma));
            const int ncoeff = _shape.back().size();
            for(int j=0; j<ncoeff; ++j) fin >> _shape.back()(j);
        }
    } else {
void SGMLParser :: parseBaseDocTypeDecl() { // If we don't find a doctype declaration, we skip it. if (mToken == DECLARATION_SYM) { mToken = mScanner->nextToken(); if (mToken == IDENTIFIER_SYM) { string identifier = mScanner->getTokenText(); if (identifier == "DOCTYPE") { mToken = mScanner->nextToken(); mToken = mDocTypeDeclParser->parse(mToken); } else { // Found unknown token. string message = "Unknown token found: " + mScanner->getTokenText(); throw ReadException(mScanner->getLineNr(), mScanner->getCharNr(), message, GENERIC, true); } } } }
// Read the star catalog from a FITS binary table, filling the parallel
// member vectors. Column names come from the parameter file.
void StarCatalog::readFits(std::string file)
{
    int hdu = GetHdu(_params,"stars",file,2);
    dbg<<"Opening StarCatalog file "<<file<<" at hdu "<<hdu<<std::endl;
    CCfits::FITS fits(file, CCfits::Read);
    // NOTE(review): extension() is addressed with hdu-1, so extension
    // indexing appears to be 0-based here — confirm against CCfits docs.
    if (hdu > 1) fits.read(hdu-1);
    CCfits::ExtHDU& table=fits.extension(hdu-1);
    long nRows=table.rows();
    dbg<<" nrows = "<<nRows<<std::endl;
    if (nRows <= 0) {
        throw ReadException(
            "StarCatalog found to have 0 rows. Must have > 0 rows.");
    }
    // Column names are configurable via the parameter file.
    std::string id_col=_params.get("stars_id_col");
    std::string x_col=_params.get("stars_x_col");
    std::string y_col=_params.get("stars_y_col");
    std::string sky_col=_params.get("stars_sky_col");
    std::string noise_col=_params.get("stars_noise_col");
    std::string flags_col=_params.get("stars_flags_col");
    std::string mag_col=_params.get("stars_mag_col");
    std::string sg_col=_params.get("stars_sg_col");
    std::string objsize_col=_params.get("stars_objsize_col");
    std::string is_star_col=_params.get("stars_isastar_col");
    // FITS rows are 1-based; read the whole table.
    long start=1;
    long end=nRows;
    dbg<<"Reading columns"<<std::endl;
    // will copy these out to positions types
    std::vector<double> x(nRows);
    std::vector<double> y(nRows);
    dbg<<"  "<<id_col<<std::endl;
    table.column(id_col).read(_id, start, end);
    dbg<<"  "<<x_col<<"  "<<y_col<<std::endl;
    table.column(x_col).read(x, start, end);
    table.column(y_col).read(y, start, end);
    dbg<<"  "<<sky_col<<std::endl;
    table.column(sky_col).read(_sky, start, end);
    dbg<<"  "<<noise_col<<std::endl;
    table.column(noise_col).read(_noise, start, end);
    dbg<<"  "<<flags_col<<std::endl;
    table.column(flags_col).read(_flags, start, end);
    dbg<<"  "<<mag_col<<std::endl;
    table.column(mag_col).read(_mag, start, end);
    dbg<<"  "<<sg_col<<std::endl;
    table.column(sg_col).read(_sg, start, end);
    dbg<<"  "<<objsize_col<<std::endl;
    table.column(objsize_col).read(_objsize, start, end);
    dbg<<"  "<<is_star_col<<std::endl;
    table.column(is_star_col).read(_is_star, start, end);
    // Combine the x/y scratch vectors into Position objects.
    _pos.resize(nRows);
    for(long i=0;i<nRows;++i) {
        _pos[i] = Position(x[i],y[i]);
    }
}
/*!\brief Load a PFP file
 *
 * Loads a PFP file into this object. The header is read first and
 * checked to ensure this really is a valid PFP file. Then the virtual
 * function PFPFile::loadRequest is called with the ID, major and minor
 * version as parameters; if it does not return true, loading is
 * aborted. Otherwise the data area is checked for compression and
 * decompressed if necessary, and only then are the individual chunks
 * read. Invalid chunks are skipped.
 *
 * \param ff Reference to a file object with which the file to be read
 * was opened.
 *
 * \remarks
 * PFPFile::clear is called before loading, so any previously held data
 * is lost. Failures are reported via exceptions (the function returns
 * void).
 *
 * \since Version 6.1.0
 */
void PFPFile::load(FileObject &ff)
{
    const char *p;
    // Map the 24-byte header; a shorter file cannot be a PFP file.
    try {
        p=ff.map(0,24);
    } catch (OverflowException &) {
        throw InvalidFormatException();
    }
    if (strncmp(p,"PFP-File",8)!=0) throw InvalidFormatException();
    // Only format version 3 is supported.
    if (Peek8(p+8)!=3) throw InvalidFormatException();
    size_t z,fsize;
    // We have a valid PFP file, but are we allowed to load it?
    char tmpid[5];
    tmpid[4]=0;
    strncpy(tmpid,p+10,4);
    int t1,t2;
    t1=Peek8(p+15);   // major version
    t2=Peek8(p+14);   // minor version
    if (!loadRequest(tmpid,t1,t2)) {
        throw AccessDeniedByInstanceException();
    }
    // Drop any previously held chunks before populating this object.
    clear();
    id.set(p+10,4);
    mainversion=Peek8(p+15);
    subversion=Peek8(p+14);
    comp=(Compression::Algorithm)Peek8(p+16);
    size_t hsize=Peek8(p+9);
    char *u=NULL;
    if (comp) {
        // Compressed payload: an 8-byte prefix carries the uncompressed
        // and compressed sizes, followed by the compressed data itself.
        p=(char*)ff.map(hsize,8);
        if (!p) throw ReadException();
        size_t sizeunk=Peek32(p);
        size_t sizecomp=Peek32(p+4);
        p=ff.map(hsize+8,sizecomp);
        if (!p) throw ReadException();
        u=(char*)malloc(sizeunk+1);
        if (!u) throw OutOfMemoryException();
        size_t dstlen=sizeunk;
        Compression c;
        try {
            c.init(comp);
            c.uncompress(u,&dstlen,p,sizecomp);
        } catch (...) {
            // Release the scratch buffer and reset state before rethrowing.
            free(u);
            clear();
            throw;
        }
        if (dstlen!=sizeunk) {
            free(u);
            clear();
            throw DecompressionFailedException();
        }
        u[dstlen]=0;
        p=u;
        fsize=dstlen;
    } else {
        // Uncompressed: read directly from the mapped file past the header.
        p=ff.map();
        p+=hsize;
        fsize=ff.size()-hsize;
    }
    // The first chunk now starts at pointer p
    z=0;
    String Chunkname;
    try {
        size_t size=0;
        while ((z+=size)<fsize) {
            size=Peek32(p+z+4);
            // An ENDF chunk or a zero-size chunk terminates the list.
            if (strncmp(p+z,"ENDF",4)==0) break;
            if (!size) break;
            // If z+size runs past the end of the file, this chunk is broken
            if (z+size>fsize) break;
            PFPChunk *chunk=new PFPChunk;
            if (!chunk) throw OutOfMemoryException();
            Chunkname.set(p+z,4);
            chunk->setName(Chunkname);
            // Chunk payload follows the 4-byte name and 4-byte size.
            chunk->setData(p+z+8,size-8);
            addChunk(chunk);
        }
    } catch (...) {
        if (u) free(u);
        clear();
        throw;
    }
    if (u) free(u);
}
// Load the schema from aSchemaFile, then scan and parse aDocument as an
// HTML document, printing timing information and finally dumping the
// resulting document tree via showTree.
void SGMLParser :: parse(const char * aSchemaFile, const char * aDocument)
{
    printf("Loading schema\n");
    clock_t start = clock();
    loadSchema(aSchemaFile);
    clock_t end = clock();
    printf("Time taken for loading the schema: %f\n",
           (double)(end - start)/CLOCKS_PER_SEC);
    start = clock();
    printf("Starting to scan the HTML document\n");
    mScanner->setDocument(aDocument);
    printf("Loaded the document\n");
    // Assume the doctype is HTML.
    mDocTypeName = "HTML";
    ElementParser elementParser(mScanner, mSchema, mDocTypeName);
    // See if we can scan a whole HTML document.
    try {
        mToken = mScanner->nextToken();
        parseSStar();
        printf("Got first token: %s\n", mScanner->getTokenText().c_str());
        parseProlog();
        // Main token-dispatch loop over the document body.
        while (mToken != EOF_SYM) {
            switch (mToken) {
                case ELEMENT_OPEN_SYM: {
                    // Kickstart the element parser.
                    TElementPtr element = elementParser.parseStartTag();
                    TDOMString name = element->getTagName();
                    ElementToken elmToken = ElementToken(START_TAG, name, element);
                    TElementDeclarationPtr declaration =
                        mSchema->getDeclaration(mDocTypeName);
                    mToken = elementParser.parse(elmToken, declaration);
                    break;
                }
                case DECLARATION_SYM: {
                    mToken = mScanner->nextToken();
                    if (mToken == COMMENT_SYM) {
                        // Lazily create the comment parser on first use.
                        // NOTE(review): raw `new` with no visible delete —
                        // ownership presumably handled by the destructor;
                        // confirm.
                        if (mCommentDeclParser == NULL)
                            mCommentDeclParser =
                                new CommentDeclParser(mScanner, TSchemaPtr());
                        mToken = mCommentDeclParser->parse(mToken, ELEMENT_OPEN_SYM);
                    }
                    else
                        throw ReadException(mScanner->getLineNr(),
                                            mScanner->getCharNr(),
                                            "Expected comment sym", GENERIC, true);
                    break;
                }
                case DECLARATION_END_SYM: {
                    mToken = mScanner->nextToken(ELEMENT_OPEN_SYM);
                    break;
                }
                case TEXT_SYM: {
                    mToken = mScanner->nextToken();
                    break;
                }
                case SPACE_SYM: {
                    // Not doing anything with that right now.
                    mToken = mScanner->nextToken();
                    break;
                }
                default: {
                    printf("Found token: %s\n", mScanner->getTokenText().c_str());
                    mToken = mScanner->nextToken();
                }
            }
        }
    }
    // NOTE(review): catching by value — prefer `const ReadException&`.
    catch(ReadException r) {
        printf(
            "Found error: line: %i char %i message: %s\n",
            r.getLineNr(), r.getCharNr(), r.getErrorMessage().c_str());
    }
    end = clock();
    printf("Time taken: %f\n", (double)(end - start)/CLOCKS_PER_SEC);
    TDocumentPtr document = elementParser.getDocument();
    showTree(document, 0);
}
// Read the PSF catalog from a FITS binary table: per-object scalars
// plus the shapelet expansion of the PSF (order, sigma, coefficients).
void PsfCatalog::readFits(std::string file)
{
    int hdu = GetHdu(_params,"psf",file,2);
    dbg<<"Opening PsfCatalog file "<<file<<" at hdu "<<hdu<<std::endl;
    CCfits::FITS fits(file, CCfits::Read);
    // NOTE(review): extension() is addressed with hdu-1, so extension
    // indexing appears to be 0-based here — confirm against CCfits docs.
    if (hdu > 1) fits.read(hdu-1);
    CCfits::ExtHDU& table=fits.extension(hdu-1);
    long nrows=table.rows();
    dbg<<"  nrows = "<<nrows<<std::endl;
    if (nrows <= 0) {
        throw ReadException(
            "PSFCatalog found to have 0 rows. Must have > 0 rows.");
    }
    // Column names are configurable via the parameter file.
    std::string id_col=_params.get("psf_id_col");
    std::string x_col=_params.get("psf_x_col");
    std::string y_col=_params.get("psf_y_col");
    std::string sky_col=_params.get("psf_sky_col");
    std::string noise_col=_params.get("psf_noise_col");
    std::string flags_col=_params.get("psf_flags_col");
    std::string nu_col=_params.get("psf_nu_col");
    std::string order_col=_params.get("psf_order_col");
    std::string sigma_col=_params.get("psf_sigma_col");
    std::string coeffs_col=_params.get("psf_coeffs_col");
    // FITS rows are 1-based; read the whole table.
    long start=1;
    long end=nrows;
    dbg<<"Reading columns"<<std::endl;
    dbg<<"  "<<id_col<<std::endl;
    table.column(id_col).read(_id, start, end);
    dbg<<"  "<<x_col<<"  "<<y_col<<std::endl;
    // Combine the x/y scratch vectors into Position objects.
    _pos.resize(nrows);
    std::vector<double> x;
    std::vector<double> y;
    table.column(x_col).read(x, start, end);
    table.column(y_col).read(y, start, end);
    for(long i=0;i<nrows;++i) _pos[i] = Position(x[i],y[i]);
    dbg<<"  "<<sky_col<<std::endl;
    table.column(sky_col).read(_sky, start, end);
    dbg<<"  "<<noise_col<<std::endl;
    table.column(noise_col).read(_noise, start, end);
    dbg<<"  "<<flags_col<<std::endl;
    table.column(flags_col).read(_flags, start, end);
    dbg<<"  "<<nu_col<<std::endl;
    table.column(nu_col).read(_nu, start, end);
    // these are temporary
    std::vector<double> sigma;
    std::vector<long> order;
    dbg<<"  "<<sigma_col<<std::endl;
    table.column(sigma_col).read(sigma, start, end);
    dbg<<"  "<<order_col<<std::endl;
    table.column(order_col).read(order, start, end);
    // gotta loop for this one
    _psf.reserve(nrows);
    for (int i=0; i<nrows; ++i) {
        int row=i+1;
        _psf.push_back(BVec(order[i],sigma[i]));
        // Number of shapelet coefficients for this order.
        int ncoeff=(order[i]+1)*(order[i]+2)/2;
        // although we are allowed to write lots of different ways, the
        // reading is less flexible.  We can *only* read a vector
        // column into a valarray, period
        std::valarray<double> coeffs;
        table.column(coeffs_col).read(coeffs, row);
        for (int j=0; j<ncoeff; ++j) _psf[i](j) = coeffs[j];
    }
}
/**
 * Reads the specified number of bytes from memory into the buffer.
 * If all of the bytes could not be read, a ReadException is thrown.
 *
 * @param address The starting address to read from.
 * @param buffer  The destination buffer to save into.
 * @param size    The number of bytes to read.
 */
void Read(uint32_t address, void* buffer, size_t size) const
{
    StackTrace trace(__METHOD__, __FILE__, __LINE__);
    const bool succeeded = process_read(_handle, address, buffer, size);
    if (!succeeded) {
        throw ReadException();
    }
}
// Read the shear catalog from a FITS binary table: per-object scalars,
// sky position, shear, covariance, and the shapelet shape vector.
void ShearCatalog::readFits(std::string file)
{
    int hdu = GetHdu(_params,"shear",file,2);
    dbg<<"Opening ShearCatalog file "<<file<<" at hdu "<<hdu<<std::endl;
    CCfits::FITS fits(file, CCfits::Read);
    // NOTE(review): extension() is addressed with hdu-1, so extension
    // indexing appears to be 0-based here — confirm against CCfits docs.
    if (hdu > 1) fits.read(hdu-1);
    CCfits::ExtHDU& table=fits.extension(hdu-1);
    long nRows=table.rows();
    dbg<<"  nrows = "<<nRows<<std::endl;
    if (nRows <= 0) {
        throw ReadException(
            "ShearCatalog found to have 0 rows.  Must have > 0 rows.");
    }
    // Column names are configurable via the parameter file.
    std::string id_col=_params.get("shear_id_col");
    std::string x_col=_params.get("shear_x_col");
    std::string y_col=_params.get("shear_y_col");
    std::string sky_col=_params.get("shear_sky_col");
    std::string noise_col=_params.get("shear_noise_col");
    std::string flags_col=_params.get("shear_flags_col");
    std::string ra_col=_params.get("shear_ra_col");
    std::string dec_col=_params.get("shear_dec_col");
    std::string shear1_col=_params.get("shear_shear1_col");
    std::string shear2_col=_params.get("shear_shear2_col");
    std::string nu_col=_params.get("shear_nu_col");
    std::string cov00_col=_params.get("shear_cov00_col");
    std::string cov01_col=_params.get("shear_cov01_col");
    std::string cov11_col=_params.get("shear_cov11_col");
    std::string order_col=_params.get("shear_order_col");
    std::string sigma_col=_params.get("shear_sigma_col");
    std::string coeffs_col=_params.get("shear_coeffs_col");
    // MJ: This is not really optimal.
    // We should get this value from the fits header, since it's stored there.
    // What is the CCfits command to do this?
    int full_order = _params.get("shear_gal_order");
    // FITS rows are 1-based; read the whole table.
    long start=1;
    long end=nRows;
    dbg<<"Reading columns"<<std::endl;
    dbg<<"  "<<id_col<<std::endl;
    table.column(id_col).read(_id, start, end);
    dbg<<"  "<<x_col<<"  "<<y_col<<std::endl;
    // Combine the x/y scratch vectors into Position objects.
    _pos.resize(nRows);
    std::vector<double> x;
    std::vector<double> y;
    table.column(x_col).read(x, start, end);
    table.column(y_col).read(y, start, end);
    for(long i=0;i<nRows;++i) _pos[i] = Position(x[i],y[i]);
    dbg<<"  "<<sky_col<<std::endl;
    table.column(sky_col).read(_sky, start, end);
    dbg<<"  "<<noise_col<<std::endl;
    table.column(noise_col).read(_noise, start, end);
    dbg<<"  "<<flags_col<<std::endl;
    table.column(flags_col).read(_flags, start, end);
    dbg<<"  "<<ra_col<<"  "<<dec_col<<std::endl;
    // ra/dec are stored in arcsec (file values appear to be in degrees,
    // hence *3600).
    _skypos.resize(nRows);
    std::vector<double> ra;
    std::vector<double> dec;
    table.column(ra_col).read(ra, start, end);
    table.column(dec_col).read(dec, start, end);
    for(long i=0;i<nRows;++i) {
        _skypos[i] = Position(ra[i]*3600.,dec[i]*3600.);
    }
    dbg<<"  "<<shear1_col<<"  "<<shear2_col<<std::endl;
    // Two shear components are packed into one complex number.
    _shear.resize(nRows);
    std::vector<double> shear1;
    std::vector<double> shear2;
    table.column(shear1_col).read(shear1, start, end);
    table.column(shear2_col).read(shear2, start, end);
    for(long i=0;i<nRows;++i) {
        _shear[i] = std::complex<double>(shear1[i],shear2[i]);
    }
    dbg<<"  "<<nu_col<<std::endl;
    table.column(nu_col).read(_nu, start, end);
    dbg<<"  "<<cov00_col<<"  "<<cov01_col<<"  "<<cov11_col<<std::endl;
    // Symmetric 2x2 covariance: cov01 fills both off-diagonals.
    _cov.resize(nRows);
    std::vector<double> cov00;
    std::vector<double> cov01;
    std::vector<double> cov11;
    table.column(cov00_col).read(cov00, start, end);
    table.column(cov01_col).read(cov01, start, end);
    table.column(cov11_col).read(cov11, start, end);
    for(long i=0;i<nRows;++i) {
        _cov[i] << cov00[i], cov01[i], cov01[i], cov11[i];
    }
    dbg<<"  "<<sigma_col<<"  "<<order_col<<std::endl;
    std::vector<double> sigma(nRows); // temporary
    table.column(sigma_col).read(sigma, start, end);
    table.column(order_col).read(_meas_galorder, start, end);
    dbg<<"  "<<coeffs_col<<std::endl;
    // Shape coefficients must be read row by row into a valarray.
    _shape.reserve(nRows);
    for (int i=0; i<nRows; ++i) {
        int row=i+1;
        _shape.push_back(BVec(full_order,sigma[i]));
        int ncoeff=_shape[i].size();
        std::valarray<double> coeffs(ncoeff);
        table.column(coeffs_col).read(coeffs, row);
        for(int j=0;j<ncoeff;++j) _shape[i](j) = coeffs[j];
        xxdbg<<"shape => "<<_shape[i].vec()<<std::endl;
    }
    dbg<<"Done ReadFits\n";
}