// Process-wide parser initialisation: disables core dumps, raises the soft
// stack limit to its hard maximum, normalises the model directory path, then
// loads every model table from that directory in dependency order.
// NOTE(review): the 0.3684 constant is an undocumented tuning value shared
// with the other tools in this suite — confirm before changing.
void generalInit(ECString path)
{
  // Never write core-dump files.
  struct rlimit coreLimit;
  coreLimit.rlim_cur = 0;
  coreLimit.rlim_max = 0;
  setrlimit( RLIMIT_CORE, &coreLimit );

  // Grow the soft stack limit up to whatever the hard cap allows.
  struct rlimit stackLimit;
  stackLimit.rlim_cur = 0;
  stackLimit.rlim_max = 0;
  getrlimit( RLIMIT_STACK, &stackLimit );
  if (stackLimit.rlim_cur < stackLimit.rlim_max)
    {
      stackLimit.rlim_cur = stackLimit.rlim_max;
      setrlimit( RLIMIT_STACK, &stackLimit );
    }

  // All loaders below expect a trailing slash on the directory name.
  if (!endsWith(path, "/"))
    path += "/";

  // Model tables; Term::init must precede readHeadInfo and the rest.
  Term::init( path );
  readHeadInfo(path);
  InputTree::init();

  // Unit rules live for the lifetime of the process (owned via the global).
  UnitRules* rules = new UnitRules;
  rules->readData(path);
  Bchart::unitRules = rules;

  Bchart::readTermProbs(path);
  MeChart::init(path);
  Bchart::setPosStarts();
  ChartBase::midFactor = (1.0 - (.3684 * ChartBase::endFactor)) / (1.0 - .3684);
  if (Feature::isLM or Feature::useExtraConditioning)
    ClassRule::readCRules(path);
}
// Process-wide parser initialisation (locale-aware variant): disables core
// dumps, raises the soft stack limit to its hard maximum, applies the
// environment's locale, sanitises the model path, then loads the model
// tables in dependency order.
void generalInit(ECString path)
{
  // Never write core-dump files.
  struct rlimit coreLimit;
  coreLimit.rlim_cur = 0;
  coreLimit.rlim_max = 0;
  setrlimit( RLIMIT_CORE, &coreLimit );

  // Grow the soft stack limit up to whatever the hard cap allows.
  struct rlimit stackLimit;
  stackLimit.rlim_cur = 0;
  stackLimit.rlim_max = 0;
  getrlimit( RLIMIT_STACK, &stackLimit );
  if (stackLimit.rlim_cur < stackLimit.rlim_max)
    {
      stackLimit.rlim_cur = stackLimit.rlim_max;
      setrlimit( RLIMIT_STACK, &stackLimit );
    }

  // load locale settings from the environment
  setlocale(LC_ALL, "");

  path = sanitizePath(path);

  // Model tables; Term::init must precede readHeadInfo and the rest.
  Term::init( path );
  readHeadInfo(path);
  InputTree::init();

  // Unit rules live for the lifetime of the process (owned via the global).
  UnitRules* rules = new UnitRules;
  rules->readData(path);
  Bchart::unitRules = rules;

  Bchart::readTermProbs(path);
  MeChart::init(path);
  Bchart::setPosStarts();
  ChartBase::midFactor = (1.0 - (.3684 * ChartBase::endFactor)) / (1.0 - .3684);
  if (Feature::isLM or Feature::useExtraConditioning)
    ClassRule::readCRules(path);
}
int main(int argc, char *argv[]) { ECArgs args( argc, argv ); assert(args.nargs() == 1); ECString path(args.arg(0)); cerr << "At start of pHsgt" << endl; for(int n = 0 ; n < MAXNUMNTS ; n++) numTerm[n] = 0; Term::init( path ); readHeadInfo(path); int sentenceCount = 0; ECString s1lex("^^"); ECString s1nm("S1"); int s1Int = Term::get(s1nm)->toInt(); UnitRules ur; ur.init(); while(cin) { //if(sentenceCount > 4000) break; if(sentenceCount%10000 == 0) cerr << sentenceCount << endl; InputTree parse; cin >> parse; //cerr << parse << endl; if(!cin) break; if(parse.length() == 0) break; EcSPairs wtList; parse.make(wtList); InputTree* par; par = &parse; addWwData(par); incrWordData(s1Int, s1lex); ur.gatherData(par); sentenceCount++; } ECString resultsString(path); resultsString += "pSgT.txt"; ofstream resultsStream(resultsString.c_str()); assert(resultsStream); int numWords = 0; resultsStream << " \n"; //leave space for number of words; resultsStream.precision(3); ECString lastWord; int wordFreq = 0; WordMap::iterator wmi = wordMap.begin(); resultsStream << wordMap.size() << "\n\n"; for( ; wmi != wordMap.end() ; wmi++) { ECString w = (*wmi).first; resultsStream << w << "\t"; PosD& posd = (*wmi).second; PosD::iterator pdi = posd.begin(); int count = 0; for( ; pdi != posd.end(); pdi++) { int posInt = (*pdi).first; int c = (*pdi).second; count += c; float p = (float)c/(float)numTerm[posInt]; resultsStream << posInt << " " << p << " "; } resultsStream << "| " << count << "\n"; } ur.setData(path); return 1; }
// Training tool "trainRs": estimates smoothing lambdas for one conditioned
// feature type.  Usage: trainRs [-L] <conditionedType> <modelPath>.
// Reads up to 1000 training trees from stdin, then runs 10 EM-style passes.
// NOTE(review): conditionedType, whichInt, tRoot, sentenceCount, pass and
// procGSwitch are assigned without local declarations, so they must be
// file-scope globals defined elsewhere in this translation unit.
int main(int argc, char *argv[])
{
  // Never write core-dump files.
  struct rlimit core_limits;
  core_limits.rlim_cur = 0;
  core_limits.rlim_max = 0;
  setrlimit( RLIMIT_CORE, &core_limits );

  ECArgs args( argc, argv );
  assert(args.nargs() == 2);
  conditionedType = args.arg(0);
  cerr << "start trainRs: " << conditionedType << endl;
  ECString path( args.arg( 1 ) );
  if(args.isset('L')) Feature::setLM();

  // Load the model tables this trainer conditions on.
  Term::init(path);
  readHeadInfo(path);
  Pst pst(path);
  if(Feature::isLM) ClassRule::readCRules(path);
  addSubFeatureFns();
  Feature::init(path, conditionedType);
  whichInt = Feature::whichInt;
  int ceFunInt = Feature::conditionedFeatureInt[Feature::whichInt];
  Feature::conditionedEvent = SubFeature::Funs[ceFunInt];
  Feat::Usage = PARSE;

  // Open the pre-built feature-tree file <path><conditionedType>.g.
  ECString ftstr(path);
  ftstr += conditionedType;
  ftstr += ".g";
  ifstream fts(ftstr.c_str());
  if(!fts)
    {
      cerr << "Could not find " << ftstr << endl;
      assert(fts);
    }
  tRoot = new FeatureTree(fts); //puts it in root;
  cout.precision(3);
  cerr.precision(3);
  lamInit();

  // Collect at most 1000 trees from stdin; each tree is heap-allocated and
  // kept alive for all training passes below (freed only at process exit).
  InputTree* trainingData[1001];
  int usedCount = 0;
  sentenceCount = 0;
  for( ; ; sentenceCount++)
    {
      if(sentenceCount%10000 == 1)
        {
          // cerr << conditionedType << ".tr "
          //<< sentenceCount << endl;
        }
      if(usedCount >= 1000) break;
      InputTree* correct = new InputTree;
      cin >> (*correct);
      if(correct->length() == 0) break;
      if(!cin) break;
      EcSPairs wtList;
      correct->make(wtList);
      InputTree* par;
      par = correct;
      trainingData[usedCount++] = par;
    }
  if(Feature::isLM) pickLogBases(trainingData,sentenceCount);
  procGSwitch = true;

  // Ten passes of lambda re-estimation over the cached trees.
  for(pass = 0 ; pass < 10 ; pass++)
    {
      if(pass%2 == 1) cout << "Pass " << pass << endl;
      goThroughSents(trainingData, sentenceCount);
      updateLambdas();
      //printLambdas(cout);
      zeroData();
    }

  // Write the estimated lambdas to <path><conditionedType>.lambdas.
  ECString resS(path);
  resS += conditionedType;
  resS += ".lambdas";
  ofstream res(resS.c_str());
  res.precision(3);
  printLambdas(res);
  printLambdas(cout);
  cout << "Total params = " << FeatureTree::totParams << endl;
  // NOTE(review): casting sbrk(0) (a pointer) to int truncates on LP64
  // platforms; this is diagnostic output only, but worth fixing.
  cout << "Done: " << (int)sbrk(0) << endl;
  // No explicit return: C++ main implicitly returns 0.
}
// Training tool "kn3Counts": gathers Kneser-Ney style n-gram counts for one
// conditioned feature type.  Usage: kn3Counts [-N#] [-L lang] [-m minCount]
// <conditionedType> <modelPath>.  Reads trees from stdin and writes the
// resulting feature tree to <path><conditionedType>.g.
int main(int argc, char *argv[])
{
  // Never write core-dump files.
  struct rlimit core_limits;
  core_limits.rlim_cur = 0;
  core_limits.rlim_max = 0;
  setrlimit( RLIMIT_CORE, &core_limits );

  ECArgs args( argc, argv );
  assert(args.nargs() == 2);
  if(args.isset('N')) numGram = atoi(args.value('N').c_str());
  Feature::setLM();
  if(args.isset('L')) Term::Language = args.value('L');
  string path( args.arg( 1 ) );
  // NOTE(review): head info is loaded twice — once here (language-specific)
  // and again unconditionally after Term::init below.  When Language=="Ch"
  // the second readHeadInfo(path) appears to overwrite the Chinese head
  // rules; confirm against readHeadInfo's behavior before relying on -L Ch.
  if(Term::Language == "Ch") readHeadInfoCh(path);
  else readHeadInfo(path);
  string conditionedType( args.arg(0) );
  cerr << "start kn3Counts " << conditionedType << endl;
  int minCount = 1;
  if(args.isset('m')) minCount = atoi(args.value('m').c_str());
  Feat::Usage = KNCOUNTS;
  FeatureTree::minCount = minCount;
  Term::init(path);
  readHeadInfo(path);
  Pst pst(path);
  addSubFeatureFns();
  Feature::assignCalc(conditionedType);
  FeatureTree::root() = new FeatureTree();
  Feature::init(path, conditionedType);
  int wI = Feature::whichInt;
  int ceFunInt = Feature::conditionedFeatureInt[wI];
  Feature::conditionedEvent = SubFeature::Funs[ceFunInt];
  string trainingString( path );   // NOTE(review): never used afterwards
  int sentenceCount = 0;
  // Count events over every tree on stdin until an empty tree.
  for( ; ; sentenceCount++)
    {
      if(sentenceCount%10000 == 1)
        {
          cerr << "rCounts " << sentenceCount << endl;
        }
      InputTree correct;
      cin >> correct;
      //if(sentenceCount > 1000) break;
      if(correct.length() == 0) break;
      //cerr <<sentenceCount << correct << endl;
      EcSPairs wtList;
      correct.make(wtList);
      InputTree* par;
      int strt = 0;    // NOTE(review): never used afterwards
      par = &correct;
      makeSent(par);
      curS = par;
      gatherFfCounts(par, 0);
      // For tag-tag / word-word models, also count an explicit STOP event
      // at the end of the sentence.
      if(wI == TTCALC || wI == WWCALC)
        {
          list<InputTree*> dummy2;
          InputTree stopInputTree(par->finish(),par->finish(),
                                  wI==TTCALC ? "" : "^^",
                                  "STOP","",
                                  dummy2,NULL,NULL);
          stopInputTree.headTree() = &stopInputTree;
          TreeHist treeh(&stopInputTree,0);
          treeh.hpos = 0;
          callProcG(&treeh);
        }
    }
  finalProbComputation();

  // Dump the whole feature tree to <path><conditionedType>.g.
  string resS(path);
  resS += conditionedType;
  resS += ".g";
  ofstream res(resS.c_str());
  assert(res);
  FTreeMap& fts = FeatureTree::root()->subtree;
  FTreeMap::iterator fti = fts.begin();
  for( ; fti != fts.end() ; fti++)
    {
      int asVal = (*fti).first;
      (*fti).second->printFTree(asVal, res);
    }
  res.close();
  cout << "Tot words: " << totWords << endl;
  cout << "Total params for " << conditionedType << " = "
       << FeatureTree::totParams << endl;
  // No explicit return: C++ main implicitly returns 0.
}
// Parser driver: parses SGML-delimited sentences from a token stream and
// prints the MAP parse of each to stdout.  Failed parses are retried once
// with a relaxed demerit factor and a larger rule-count timeout.
int main(int argc, char *argv[])
{
  ECArgs args( argc, argv );
  /* o = basic, but not debugging, output.
     l = length of sentence to be proceeds 0-40 is default
     n = work on each #'th line.
     d = print out debugging info at level #
     W = use wwclasses
     R = use rwclasses
     t = report timings (requires o)
     s = maximum sleep time
     f = f# says multiply ctl2 counts by #
     p = p# use prepFactor #
     P = which types of prob models to use
  */
  // prevent core file creation;
  struct rlimit core_limits;
  core_limits.rlim_cur = 0;
  core_limits.rlim_max = 0;
  setrlimit( RLIMIT_CORE, &core_limits );

  params.init( args );
  // -s: random startup delay (stagger parallel jobs on shared storage).
  if(args.isset('s'))
    {
      int maxDelay = atoi(args.value('s').c_str());
      srand(params.whichSent());
      int randN = rand();
      int delay = randN%maxDelay;
      sleep(delay);
    }
  // -T: time factor given in tenths.
  if(args.isset('T'))
    {
      int fac = atoi(args.value('T').c_str());
      float ffac = (float)fac;
      ffac /= 10;
      Bchart::timeFactor = ffac;
    }
  int maxSentLen = 70;  // sentences longer than this are skipped
  if(args.isset('l'))
    {
      maxSentLen = atoi(args.value('l').c_str());
    }

  // Timing accumulators (only meaningful with -t).
  int totEdges = 0;
  int totPopedEdges = 0;
  double totAccessTime = 0;
  double totParseTime = 0;
  double totSemParseTime = 0;
  clock_t lastTime, currTime;
  double lastTimeSec, currTimeSec, elapsedTime;

  endFactor = 1.2;
  midFactor = (1.0 - (.3684 * endFactor))/(1.0 - .3684);
  if( args.nargs() > 2 || args.nargs() == 0 )	// require path name
    error( "Need exactly two arg." );
  ECString path( args.arg( 0 ) );
  readHeadInfo(path);
  Term::init( path );
  InputTree::init();
  ECString testSString( args.arg(1) );
  ewDciTokStrm testSStream(testSString);
  //ifstream testSStream(testSString.c_str());
  if( !testSStream ) error( "No testSstream" );
  int sentenceCount = 0;	//counts all sentences so we can use 1/50;
  ECString probSumString( path );
  probSumString += "pSgT.txt";
  ifstream probSumStream( probSumString.c_str() );
  if( !probSumStream ) error( "Failed to find probSum file" );
  Bchart::readTermProbs(path);
  if( args.isset('d') )
    {
      int lev = atoi(args.value('d').c_str());
      Bchart::printDebug() = lev;
    }
  int totSents = 0;
  int totUnparsed = 0;
  MeChart::init(path);
  Bchart::setPosStarts();

  // Main loop: one SGML-delimited sentence per iteration.
  for( ; !(!testSStream) ; )
    {
      SentRep sr(testSStream, SentRep::SGML);
      int len = sr.length();
      if(len == 0) continue;
      if(len > maxSentLen) continue;
      // Only process the sentences assigned to this job (-n style split).
      if( !params.field().in(sentenceCount) )
        {
          sentenceCount++;
          continue;
        }
      if(len == 1)
        {
          if(sr[0].lexeme() == "</DOC>")
            {
              continue;
            }
        }
      sentenceCount++;
      //SentRep orgsr( wtList ); // used in precision calc;
      if( args.isset('t') ) lastTime = clock();
      if(args.isset('t') )
        {
          currTime = clock();
          lastTimeSec = (double)lastTime/(double)CLOCKS_PER_SEC;
          currTimeSec = (double)currTime/(double)CLOCKS_PER_SEC;
          elapsedTime = currTimeSec - lastTimeSec;
          // NOTE(review): the +2147 looks like a correction for clock_t
          // wrap-around (~2147s at CLOCKS_PER_SEC==1e6) — confirm.
          if(elapsedTime < 0) elapsedTime += 2147;
          cerr << "Reading data time = " << elapsedTime << endl;
          totAccessTime += elapsedTime;
          lastTime = currTime;
        }

      // First parse attempt.
      MeChart* chart = new MeChart( sr );
      curChart = chart;
      chart->ruleCountTimeout() = 250000;
      totSents++;
      if(args.isset('t') ) lastTime = clock();
      double tmpCrossEnt = chart->parse( );
      Item* topS = chart->topS();
      if(!topS)
        {
          if(len == 1)
            {
              delete chart;
              continue;
            }
          // Retry once with a relaxed edge demerit and bigger timeout.
          Edge::DemFac = .9;
          delete chart;
          chart = new MeChart(sr);
          chart->ruleCountTimeout() = 350000;
          curChart = chart;
          tmpCrossEnt = chart->parse( );
          topS = chart->topS();
          Edge::DemFac = .999;
          if(!topS)
            {
              totUnparsed++;
              cerr << "Parse failed on: " << sr << endl;
              delete chart;
              continue;
            }
        }
      // compute the outside probabilities on the items so that we can
      // skip doing detailed computations on the really bad ones
      if(args.isset('t') )
        {
          currTime = clock();
          lastTimeSec = (double)lastTime/(double)CLOCKS_PER_SEC;
          currTimeSec = (double)currTime/(double)CLOCKS_PER_SEC;
          elapsedTime = currTimeSec - lastTimeSec;
          if(elapsedTime < 0) elapsedTime += 2147;
          cerr << "Parsing time = " << elapsedTime
               << "\tEdges created = " << chart->totEdgeCountAtS()
               << "\tEdges poped = " << chart->popedEdgeCountAtS() << endl;
          totParseTime += elapsedTime;
          //totEdges += chart->totEdgeCountAtS();
          //totPopedEdges += chart->popedEdgeCountAtS();
          totEdges += chart->totEdgeCountAtS();
          totPopedEdges += chart->popedEdgeCountAtS();
          lastTime = clock();
        }
      chart->set_Alphas();

      // Extract and print the MAP parse.
      AnswerTree* at = chart->findMapParse();
      if( !at )
        {
          totUnparsed++;
          cerr << "MapParse failed on: " << sr << endl;
          delete chart;
          continue;
        }
      InputTree* mapparse = inputTreeFromAnswerTree(at,topS);
      //at->deleteSubTrees();
      //delete at;
      cout << *mapparse << endl;
      delete mapparse;
      if(args.isset('t') )
        {
          currTime = clock();
          lastTimeSec = (double)lastTime/(double)CLOCKS_PER_SEC;
          currTimeSec = (double)currTime/(double)CLOCKS_PER_SEC;
          elapsedTime = currTimeSec - lastTimeSec;
          if(elapsedTime < 0) elapsedTime += 2147;
          cerr << "Sem Parsing time = " << elapsedTime << endl;
          totSemParseTime += elapsedTime;
        }
      delete chart;
    }

  // Timing summary (NOTE(review): divides by totSents, which is 0 when no
  // sentence was processed).
  if( args.isset('t') )
    cout << "Av access time = " << totAccessTime/totSents
         << "\t Av parse time = " << totParseTime/totSents
         << "\t Av stats time = " << totSemParseTime/totSents
         << "\nAv edges created = " << (float)totEdges/totSents
         << "\tAv edges poped = " << (float)totPopedEdges/totSents
         << endl;
  return 0;
}
int main(int argc, char *argv[]) { ECArgs args( argc, argv ); assert(args.nargs() == 1); ECString path(args.arg(0)); cerr << "At start of pSfgt" << endl; for(int n = 0 ; n < 140 ; n++) numTerm[n] = 0; ECString resultsString(path); resultsString += "endings.txt"; Term::init( path ); if(args.isset('L')) Term::Language = args.value('L'); readHeadInfo(path); Pst pst(path); //???; int sentenceCount = 0; int wordCount = 0; int processedCount = 0; /*int i, j; for(i = 0 ; i < 60 ; i++) for(j = 0 ; j < 30 ; j++) data[i][j] = 0; */ int i = 0; while(cin) { if(i++%5000 == 1) cerr << i << endl; InputTree parse; cin >> parse; if(!cin) break; if(parse.length() == 0 && cin) continue; if(parse.length()==0 ||!cin) break; addWwData(&parse); processedCount++; wordCount += parse.length(); } ofstream resultsStream(resultsString.c_str()); assert(resultsStream); /*int totNt[30]; for(i = 0 ; i < 30 ; i++) totNt[i] = 0; for(i = 0 ; i <= Term::lastTagInt() ; i++) { for(j = 0 ; j < (Term::lastNTInt() - Term::lastTagInt()) ; j++) totNt[j] += data[i][j]; } */ resultsStream << numEndings << "\n"; for(i = 0 ; i < 140 ; i++) { endMap::iterator emi = endData[i].begin(); for( ; emi != endData[i].end() ; emi++) { ECString ending = (*emi).first; int cnt = (*emi).second; resultsStream << i << "\t" << ending << "\t" << (float) cnt / (float) numTerm[i] << endl; //<< "\n"; } } cout<<"totol sentence:"<<processedCount<<endl; cout<<"total suffix:"<<numEndings<<endl; return 0; }
int main(int argc, char *argv[]) { ECArgs args( argc, argv ); assert(args.nargs() == 1); ECString path(args.arg(0)); cerr << "At start of pTgNt" << endl; for(int n = 0 ; n < MAXNUMTS ; n++) numTerm[n] = 0; ECString resultsString(path); resultsString += "endings.txt"; Term::init( path ); if(args.isset('L')) Term::Language = args.value('L'); readHeadInfo(path); Pst pst(path); int sentenceCount = 0; int wordCount = 0; int processedCount = 0; int i, j; for(i = 0 ; i < MAXNUMTS ; i++) for(j = 0 ; j < MAXNUMNTS ; j++) data[i][j] = 0; i = 0; while(cin) { if(i%10000 == 0) cerr << i << endl; //if(i > 1000) break; InputTree parse; cin >> parse; if(!cin) break; if(parse.length() == 0) break; const Term* resTerm = addWwData(&parse); processedCount++; wordCount += parse.length(); i++; } ofstream resultsStream(resultsString.c_str()); assert(resultsStream); int totNt[MAXNUMTS]; for(i = 0 ; i < MAXNUMTS ; i++) totNt[i] = 0; for(i = 0 ; i <= Term::lastTagInt() ; i++) { for(j = 0 ; j < (Term::lastNTInt() - Term::lastTagInt()) ; j++) totNt[j] += data[i][j]; } resultsStream << numEndings << "\n"; for(i = 0 ; i < MAXNUMTS ; i++) { endMap::iterator emi = endData[i].begin(); for( ; emi != endData[i].end() ; emi++) { ECString ending = (*emi).first; int cnt = (*emi).second; resultsStream << i << "\t" << ending << "\t" << (float) cnt / (float) numTerm[i] << endl; //<< "\n"; } } return 0; }
// Training tool "pUgT": reads parse trees from stdin and estimates the
// unknown-word statistics p(unknown|tag), p(Capital|tag) and
// p(hasDash|tag,unknown), written to <path>pUgT.txt, plus per-nonterminal
// totals written to <path>nttCounts.txt.
int main(int argc, char *argv[])
{
  ECArgs args( argc, argv );
  // NOTE(review): unlike the sibling tools, argc is not validated here;
  // args.arg(0) is read unconditionally.
  ECString path(args.arg(0));
  cerr << "At start of pUgT" << endl;
  Term::init( path );
  if(args.isset('L')) Term::Language = args.value('L');
  readHeadInfo(path);
  Pst pst(path);
  int sentenceCount = 0;
  int i, j;   // NOTE(review): j is declared but never used
  // Zero all per-tag counters.
  for(i = 0 ; i < MAXNUMTS ; i++)
    {
      posCounts[i] = 0;
      posCapCounts[i] = 0;
      posDenoms[i] = 0;
      posUCounts[i] = 0;
      posDashCounts[i] = 0;
    }
  for(i = 0 ; i < MAXNUMTS ; i++) totCounts[i] = 0;
  i = 0;
  // Read trees until an empty tree or EOF.
  for( ; ; )
    {
      if(i++%10000 == 1) cerr << i << endl;
      //if(i > 1000) break;
      InputTree parse;
      cin >> parse;
      //cerr << parse << endl;
      if(parse.length() == 0) break;
      if(!cin) break;
      curSent = &parse;
      addWwData(&parse);
      sentenceCount++;
    }

  // Emit the per-tag unknown-word statistics.
  ECString resultsString(path);
  resultsString += "pUgT.txt";
  ofstream resultsStream(resultsString.c_str());
  assert(resultsStream);
  /* we print out p(unknown|tag) p(Capital|tag) p(hasDash|tag, unknown)
     note for Capital the denom is different because we ignore the first
     two words of the sentence */
  int nm = Term::lastTagInt()+1;
  for(i = 0 ; i < nm ; i++)
    {
      resultsStream << i << "\t";
      // p(unknown|tag); guarded against a zero denominator.
      float pugt = 0;
      float pudenom = (float)posDenoms[i];
      if(pudenom > 0) pugt = (float)posUCounts[i]/pudenom;
      resultsStream << pugt << "\t";
      // p(Capital|tag)
      if(posCounts[i] == 0) resultsStream << 0 << "\t";
      else resultsStream << (float) posCapCounts[i]/ (float)posCounts[i] << "\t";
      // p(hasDash|tag, unknown)
      if(posUCounts[i] == 0) resultsStream << 0;
      else resultsStream << (float)posDashCounts[i]/posUCounts[i] ;
      resultsStream << endl;
    }

  // Emit raw totals per (non)terminal index.
  ECString resultsString2(path);
  resultsString2 += "nttCounts.txt";
  ofstream resultsStream2(resultsString2.c_str());
  assert(resultsStream2);
  for(i = 0 ; i <= Term::lastNTInt() ; i++)
    {
      resultsStream2 << i << "\t";
      resultsStream2 << totCounts[i] << "\n";
    }
  return 0;
}
// Reads an HJ-1 quick-view image file: parses the 180-byte header, then walks
// the file line by line (92-byte aux record + one line of pixel data each),
// collecting valid ephemeris/attitude records into auxList, and finally
// writes a description XML to outPath.  Prints a percentage progress bar to
// stdout.  Returns false only if the input file cannot be opened.
// (Original comments were in Chinese; translated to English below.  The
// large commented-out regions are retained as-is: they are earlier
// scene-splitting / GDAL-output code kept for reference.)
bool HJ1QVProcReader::read_by_scene(const char* filename, const char* outPath)
{
	int last_pos = 0;
	ifstream fs;
	fs.open(filename, std::ios_base::binary);
	if(!fs)
	{
		cerr<<"open error!"<<endl;
		return false;
	}
	// Determine total file size.
	fs.seekg(0, ios::end);
	LONGLONG file_size = fs.tellg();
	fs.seekg(0, ios::beg);

	// Read the QUIVIMAGE_HEAD_INFO header.
	QUIVIMAGE_HEAD_INFO theHeaderInfo;
	readHeadInfo(fs, theHeaderInfo);
	theHeaderInfo.band_num = theHeaderInfo.gray_image_flag?1:3;
	// Bytes of pixel data per image line.
	int lineDataByte = theHeaderInfo.band_num * theHeaderInfo.data_width * theHeaderInfo.sample_bit_count/8;
	// Line count derived from file size: 180-byte header, then
	// (92-byte aux record + lineDataByte) per line.
	theHeaderInfo.line_num = (file_size-180)/(92+lineDataByte);
	theHeaderInfo.sample_num = theHeaderInfo.data_width;
	//writeDescXML(theHeaderInfo, (strPath+"\\desc.xml").c_str());

	int percent = 0;
	printf("\r%d%%", percent);
	fflush(stdout);

	int sceneSize = 1*theHeaderInfo.data_width;
	const int OVERLAP = 0;
	int nScenes=(int)(theHeaderInfo.line_num / sceneSize);
	if(nScenes * sceneSize < theHeaderInfo.line_num )
		nScenes =nScenes +1;
	//fstream ephFile;
	//ephFile.open((strPath+"\\eph.txt").c_str(), std::ios_base::out);
	//ephFile.close();
	//fstream nadirsFile;
	//nadirsFile.open((strPath+"\\nadirs.txt").c_str(), std::ios_base::out);
	//nadirsFile.close();
	//GDALDatasetH hDstDS = new GDALDatasetH;
	//const char *pszFormat = "GTiff";
	//char *pszTargetSRS = NULL;
	//GDALDriverH hDriver = GDALGetDriverByName( pszFormat );

	// Pixel type follows the header's bit depth.
	GDALDataType eDT;
	if (16 == theHeaderInfo.sample_bit_count)
	{
		eDT = GDALDataType::GDT_Int16;
	}else
	{
		eDT = GDALDataType::GDT_Byte;
	}
	vector<int> panBandMap;
	for (int i=0;i < theHeaderInfo.band_num;++i)
	{
		panBandMap.push_back(i+1);
	}
	vector<QUIVIMAGE_AUX_INFO> auxList;
	GByte *pBuf = NULL;
	int iLine = 0;
	while (!fs.eof()) {
		// Read the per-line ephemeris/aux record.
		streampos pos_current = fs.tellg();
		// Stop if a full record (92-byte aux + line data) no longer fits.
		if ((long long)pos_current + 92 + lineDataByte > file_size)
		{
			break;
		}
		QUIVIMAGE_AUX_INFO aux;
		readQuivAuxInfo(fs, aux);
#if 0
		if (1 == aux.valid_flag) {
			// if the ephemeris record is valid
			//ephFile.open((strPath+"\\eph.txt").c_str(), std::ios_base::app);
			//ephFile<<iLine+1<<" "
			//	<<aux.satpos.x<<" "
			//	<<aux.satpos.y<<" "
			//	<<aux.satpos.z<<" "
			//	<<aux.satpos.vx<<" "
			//	<<aux.satpos.vy<<" "
			//	<<aux.satpos.vz<<" "
			//	<<aux.satatt.roll<<" "
			//	<<aux.satatt.pitch<<" "
			//	<<aux.satatt.yaw<<endl;
			//ephFile.close();
			//if (0 != timeCompare(lastTime, aux.line_time))
			//{
			//int centerLine = int((lastLine+iLine)*0.5+0.5);
			ephFile.open((strPath+"\\eph.txt").c_str(), std::ios_base::app);
			//ephFile<<centerLine+1-1792<<" "
			//ephFile<<centerLine+1-1831<<" "
			ephFile << iLine << " "
				<<aux.satpos.x<<" "
				<<aux.satpos.y<<" "
				<<aux.satpos.z<<" "
				<<aux.satpos.vx<<" "
				<<aux.satpos.vy<<" "
				<<aux.satpos.vz<<" "
				<<aux.satatt.roll<<" "
				<<aux.satatt.pitch<<" "
				<<aux.satatt.yaw<<" "
				<<aux.nadir_pos.latitude<<" "
				<<aux.nadir_pos.longitude<<" "
				<<time2String(aux.line_time)<<endl;
			ephFile.close();
			//	lastTime = aux.line_time;
			//	start_line = iLine;
			//}
			//lastLine = iLine;
		}
#else
		if (0 == aux.valid_flag)
		{
			if (iLine == 0 || aux.line_count == 0)
			{
				//if (aux.satpos.x == 0 && aux.satpos.y == 0 && aux.satpos.y ==0)
				//{
				//	// invalid line
				//	continue;
				//}
				aux.line_num = iLine - aux.line_count;
				// A record with an all-zero position is an invalid line;
				// keep only records with real position data.
				// NOTE(review): "aux.satpos.y ==0" appears twice — the third
				// test was presumably meant to be satpos.z; confirm.
				if (aux.satpos.x == 0 && aux.satpos.y == 0 && aux.satpos.y ==0)
				{
					//iLine++;
					//// invalid line
					//continue;
				}
				else
				{
					auxList.push_back(aux);
				}
				//int line_num;
				//line_num = iLine - aux.line_count;// - line_offset;
				//ephFile.open((strPath+"\\eph.txt").c_str(), std::ios_base::app);
				//ephFile<<line_num<<" "
				//	<<setprecision(25)
				//	<<aux.satpos.x<<" "
				//	<<aux.satpos.y<<" "
				//	<<aux.satpos.z<<" "
				//	<<aux.satpos.vx<<" "
				//	<<aux.satpos.vy<<" "
				//	<<aux.satpos.vz<<" "
				//	<<aux.satatt.roll<<" "
				//	<<aux.satatt.pitch<<" "
				//	<<aux.satatt.yaw<<" "
				//	<<aux.satatt.vroll<<" "
				//	<<aux.satatt.vpitch<<" "
				//	<<aux.satatt.vyaw<<" "
				//	<<aux.gps_time<<endl;
				//ephFile.close();
				//double lat, lon, alt;
				//ecef2lla(aux.satpos.x, aux.satpos.y, aux.satpos.z, lat, lon, alt);
				//nadirsFile.open((strPath+"\\nadirs.txt").c_str(), std::ios_base::app);
				//nadirsFile<<line_num<<" "
				//	<<lat<<" "
				//	<<lon<<endl;
				//nadirsFile.close();
			}
		}
#endif
		// Skip over the image data area for this line.
		fs.seekg(pos_current + streampos(92 + lineDataByte), ios_base::beg);
		// Compute current progress.
		streampos pos_ = fs.tellg(); // position of the file read pointer
		int tmpPercent = (int)(pos_ / (double)(file_size)* 100 + 0.5);
		if (tmpPercent > percent)
		{
			percent = tmpPercent;
			printf("\r%d%%", percent);
			fflush(stdout);
		}
		iLine++;
		//int iScene = iLine / sceneSize;
		//int iWidth = theHeaderInfo.sample_num;
		//int iHeight = sceneSize;
		//int line_offset = iScene * sceneSize;
		//// last scene
		//if (iScene == nScenes - 1)
		//{
		//	iHeight = theHeaderInfo.line_num - line_offset;
		//}
		//if (iScene*sceneSize == iLine)
		//{
		//	char buf[1024];
		//	sprintf_s(buf, "%s\\Scene%02d", strPath.c_str(), iScene+1);
		//	if (!QDir(buf).exists())
		//	{
		//		_mkdir(buf);
		//	}
		//	string sceneDir(buf);
		//	string sceneImage = sceneDir + "\\IMAGE.TIF";
		//	hDstDS = GDALCreate( hDriver, sceneImage.c_str(), iWidth, iHeight, theHeaderInfo.band_num, eDT, NULL );
		//	pBuf = new GByte[iWidth*iHeight*theHeaderInfo.band_num];
		//}
		////cout<<fs.tellg()<<endl;
		//if (file_size - fs.tellg() < lineDataByte || iLine == theHeaderInfo.line_num-1)
		//{
		//	((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight, pBuf, iWidth, iHeight, eDT, theHeaderInfo.band_num, &panBandMap[0],theHeaderInfo.band_num, lineDataByte, 1);
		//	//int iBand = 1;
		//	//int band1_offset = 3;
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,band1_offset,0,iWidth - band1_offset, iHeight, pBuf, iWidth-band1_offset, iHeight, eDT, 1, &iBand, theHeaderInfo.band_num, lineDataByte, 1);
		//	//iBand = 2;
		//	//int band2_offset = 4;
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight-band2_offset, pBuf+lineDataByte*band2_offset, iWidth, iHeight-band2_offset, eDT, 1, &iBand,theHeaderInfo.band_num, lineDataByte, 1);
		//	////((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,band2_offset,iWidth, iHeight-band2_offset, pBuf, iWidth, iHeight-band2_offset, eDT, 1, &iBand,theHeaderInfo.band_num, lineDataByte, 1);
		//	//iBand = 3;
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight, pBuf, iWidth, iHeight, eDT, 1, &iBand, theHeaderInfo.band_num, lineDataByte, 1);
		//	GDALClose( hDstDS );
		//	delete[]pBuf;
		//	pBuf = NULL;
		//	//CPLFree( pBuf );
		//	break;
		//}
		//int npos = fs.tellg();
		//fs.read((char*)(pBuf+(iLine-iScene*sceneSize)*iWidth*theHeaderInfo.band_num), lineDataByte);
		//if (iLine-iScene*sceneSize == iHeight-1)
		//{
		//	((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight, pBuf, iWidth, iHeight, eDT, theHeaderInfo.band_num, &panBandMap[0],theHeaderInfo.band_num, lineDataByte, 1);
		//	//int iBand = 1;
		//	//int band1_offset = 1;
		//	////((GDALDataset*)hDstDS)->RasterIO(GF_Write,band1_offset,0,iWidth - band1_offset, iHeight, pBuf, iWidth-band1_offset, iHeight, eDT, 1, &iBand, theHeaderInfo.band_num, lineDataByte, 1);
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth - band1_offset, iHeight, pBuf+theHeaderInfo.band_num*band1_offset, iWidth-band1_offset, iHeight, eDT, 1, &iBand, theHeaderInfo.band_num, lineDataByte, 1);
		//	//iBand = 2;
		//	//int band2_offset = 2;
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight-band2_offset, pBuf+lineDataByte*band2_offset, iWidth, iHeight-band2_offset, eDT, 1, &iBand,theHeaderInfo.band_num, lineDataByte, 1);
		//	////((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,band2_offset,iWidth, iHeight-band2_offset, pBuf, iWidth, iHeight-band2_offset, eDT, 1, &iBand,theHeaderInfo.band_num, lineDataByte, 1);
		//	//iBand = 3;
		//	//((GDALDataset*)hDstDS)->RasterIO(GF_Write,0,0,iWidth, iHeight, pBuf, iWidth, iHeight, eDT, 1, &iBand, theHeaderInfo.band_num, lineDataByte, 1);
		//	GDALClose(hDstDS);
		//	delete[]pBuf;
		//	pBuf = NULL;
		//	//CPLFree( pBuf );
		//}
		//iLine++;
		//// compute current progress
		//streampos pos_ = fs.tellg(); // position of the file read pointer
		//int tmpPercent = (int)(pos_ / (double)(file_size) * 100 + 0.5);
		//if(tmpPercent > percent)
		//{
		//	percent = tmpPercent;
		//	printf("\r%d%%", percent);
		//}
	}
	fs.close();
	// Write the collected header + aux records as a description XML.
	writeDescXML(theHeaderInfo, auxList, outPath);
	//// write nScenes aux files respectively
	//for (int iScene = 0;iScene < nScenes;++iScene)
	//{
	//	char buf[1024];
	//	sprintf_s(buf, "%s\\Scene%02d", strPath.c_str(), iScene+1);
	//	if (!QDir(buf).exists())
	//	{
	//		_mkdir(buf);
	//	}
	//	string sceneDir(buf);
	//
	//	int iWidth = theHeaderInfo.sample_num;
	//	int iHeight = sceneSize;
	//	int line_offset = iScene * sceneSize;
	//	// last scene
	//	if (iScene == nScenes - 1)
	//	{
	//		iHeight = theHeaderInfo.line_num - line_offset;
	//	}
	//	QUIVIMAGE_HEAD_INFO sceneHeaderInfo = theHeaderInfo;
	//	sceneHeaderInfo.line_num = iHeight;
	//	writeDescXML(sceneHeaderInfo, (sceneDir+"\\desc.xml").c_str());
	//	int overlap_lines = 3000;
	//	fstream inEphFile;
	//	inEphFile.open((strPath+"\\eph.txt").c_str(), std::ios_base::in);
	//	fstream outEphFile;
	//	outEphFile.open((sceneDir+"\\eph.txt").c_str(), std::ios_base::out);
	//	char ephLine[2048];
	//	while (inEphFile.getline(ephLine, 2048))
	//	{
	//		int scene_line = atoi(SBeforeFirst(string(ephLine), ' ').c_str()) - line_offset;
	//		if (scene_line > -overlap_lines && scene_line < sceneSize + overlap_lines)
	//		{
	//			outEphFile<<scene_line<<" "
	//				<<SAfterFirst(string(ephLine), ' ')<<endl;
	//		}
	//	}
	//	inEphFile.close();
	//	outEphFile.close();
	//	fstream inNadirFile;
	//	inNadirFile.open((strPath+"\\nadirs.txt").c_str(), std::ios_base::in);
	//	fstream outNadirFile;
	//	outNadirFile.open((sceneDir+"\\nadirs.txt").c_str(), std::ios_base::out);
	//	char nadirLine[2048];
	//	while (inNadirFile.getline(nadirLine, 2048))
	//	{
	//		int scene_line = atoi(SBeforeFirst(string(nadirLine), ' ').c_str()) - line_offset;
	//		if (scene_line > -overlap_lines && scene_line < sceneSize + overlap_lines)
	//		{
	//			outNadirFile<<scene_line<<" "
	//				<<SAfterFirst(string(nadirLine), ' ')<<endl;
	//		}
	//	}
	//	inNadirFile.close();
	//	outNadirFile.close();
	//	writeDescXML(theHeaderInfo, auxList, (sceneDir+"\\desc.xml").c_str(), line_offset);
	//}
	printf("\r%d%%\n", 100);
	fflush(stdout);
	return true;
}