/*---------------------------------------------------------------------------*/
/**
 * Create a new feature set of the specified type and read its features
 * from an open text file.  The expected text representation of a feature
 * set is an integer N (the number of features in the set) followed by a
 * list of N feature descriptions.
 *
 * @param File        open text file to read the new feature set from
 * @param FeatureDesc specifies the type of feature to read from File
 * @return New feature set read from File.
 */
FEATURE_SET ReadFeatureSet(FILE *File, const FEATURE_DESC_STRUCT* FeatureDesc) {
  int NumFeatures;

  /* The count precedes the feature list; reject garbage or negative counts. */
  if (fscanf(File, "%d", &NumFeatures) != 1 || NumFeatures < 0)
    DoError(ILLEGAL_NUM_FEATURES, "Illegal number of features in set");

  FEATURE_SET FeatureSet = NewFeatureSet(NumFeatures);
  for (int Index = 0; Index < NumFeatures; Index++)
    AddFeature(FeatureSet, ReadFeature(File, FeatureDesc));

  return FeatureSet;
}                                /* ReadFeatureSet */
/**
 * Dialog procedure for the processor details page.
 *
 * On WM_INITDIALOG it fills in the CPU feature list (MMX/SSE/SSE2/3DNOW),
 * the model/stepping decoded from SYSTEM_INFO.wProcessorRevision, and the
 * current core speed queried via CallNtPowerInformation.
 *
 * Fixes over the original:
 *  - uses the named POWER_INFORMATION_LEVEL value ProcessorInformation
 *    instead of the magic number 11;
 *  - checks the NTSTATUS result of CallNtPowerInformation and zero-
 *    initializes PowerInfo, so an uninitialized CurrentMhz is never shown.
 *
 * NOTE(review): on multi-processor systems ProcessorInformation expects a
 * buffer with one PROCESSOR_POWER_INFORMATION per processor; a single-entry
 * buffer may fail there — confirm against the target OS behavior.
 */
INT_PTR CALLBACK
ProcessorDlgProc(HWND hDlg, UINT uMessage, WPARAM wParam, LPARAM lParam)
{
    switch (uMessage)
    {
        case WM_INITDIALOG:
        {
            WCHAR szFeatures[MAX_PATH] = L"";
            WCHAR szModel[3];
            WCHAR szStepping[3];
            WCHAR szCurrentMhz[10];
            BOOL bFirst = TRUE;
            SYSTEM_INFO SystemInfo;
            PROCESSOR_POWER_INFORMATION PowerInfo = {0};

            /* Build the feature list.  AddFeature uses the StringCb
               (byte-counted) APIs, so sizeof() is correct here. */
            if (IsProcessorFeaturePresent(PF_MMX_INSTRUCTIONS_AVAILABLE))
                AddFeature(szFeatures, sizeof(szFeatures), L"MMX", &bFirst);
            if (IsProcessorFeaturePresent(PF_XMMI_INSTRUCTIONS_AVAILABLE))
                AddFeature(szFeatures, sizeof(szFeatures), L"SSE", &bFirst);
            if (IsProcessorFeaturePresent(PF_XMMI64_INSTRUCTIONS_AVAILABLE))
                AddFeature(szFeatures, sizeof(szFeatures), L"SSE2", &bFirst);
            /*if (IsProcessorFeaturePresent(PF_SSE3_INSTRUCTIONS_AVAILABLE))
                AddFeature(szFeatures, sizeof(szFeatures), L"SSE3", &bFirst); */
            if (IsProcessorFeaturePresent(PF_3DNOW_INSTRUCTIONS_AVAILABLE))
                AddFeature(szFeatures, sizeof(szFeatures), L"3DNOW", &bFirst);

            SetDlgItemTextW(hDlg, IDC_FEATURES, szFeatures);

            /* wProcessorRevision: high byte = model, low byte = stepping. */
            GetSystemInfo(&SystemInfo);
            StringCbPrintfW(szModel, sizeof(szModel), L"%x",
                            HIBYTE(SystemInfo.wProcessorRevision));
            StringCbPrintfW(szStepping, sizeof(szStepping), L"%d",
                            LOBYTE(SystemInfo.wProcessorRevision));
            SetDlgItemTextW(hDlg, IDC_MODEL, szModel);
            SetDlgItemTextW(hDlg, IDC_STEPPING, szStepping);

            /* Only display the core speed if the query actually succeeded
               (NTSTATUS success == 0). */
            if (CallNtPowerInformation(ProcessorInformation, NULL, 0,
                                       &PowerInfo, sizeof(PowerInfo)) == 0)
            {
                StringCbPrintfW(szCurrentMhz, sizeof(szCurrentMhz), L"%ld %s",
                                PowerInfo.CurrentMhz, L"MHz");
                SetDlgItemTextW(hDlg, IDC_CORESPEED, szCurrentMhz);
            }

            return TRUE;
        }
    }
    return FALSE;
}
/**
 * Create a new feature set of the specified type and read in
 * the features from File.  A feature set is serialized as an integer
 * count N followed by N feature descriptions.
 * @param File open text file to read new feature set from
 * @param FeatureDesc specifies type of feature to read from File
 * @return New feature set read from File.
 */
FEATURE_SET ReadFeatureSet(FILE* File, const FEATURE_DESC_STRUCT* FeatureDesc) {
  int NumFeatures;
  // A malformed or negative count is a fatal file-format error.
  ASSERT_HOST(tfscanf(File, "%d", &NumFeatures) == 1);
  ASSERT_HOST(NumFeatures >= 0);

  FEATURE_SET set = NewFeatureSet(NumFeatures);
  for (int remaining = NumFeatures; remaining > 0; --remaining) {
    AddFeature(set, ReadFeature(File, FeatureDesc));
  }
  return set;
}
/*---------------------------------------------------------------------------*/ void ConvertSegmentToPicoFeat(FPOINT *Start, FPOINT *End, FEATURE_SET FeatureSet) { /* ** Parameters: ** Start starting point of pico-feature ** End ending point of pico-feature ** FeatureSet set to add pico-feature to ** Globals: ** classify_pico_feature_length length of a single pico-feature ** Operation: This routine converts an entire segment of an outline ** into a set of pico features which are added to ** FeatureSet. The length of the segment is rounded to the ** nearest whole number of pico-features. The pico-features ** are spaced evenly over the entire segment. ** Return: none (results are placed in FeatureSet) ** Exceptions: none ** History: Tue Apr 30 15:44:34 1991, DSJ, Created. */ FEATURE Feature; FLOAT32 Angle; FLOAT32 Length; int NumFeatures; FPOINT Center; FPOINT Delta; int i; Angle = NormalizedAngleFrom (Start, End, 1.0); Length = DistanceBetween (*Start, *End); NumFeatures = (int) floor (Length / classify_pico_feature_length + 0.5); if (NumFeatures < 1) NumFeatures = 1; /* compute vector for one pico feature */ Delta.x = XDelta (*Start, *End) / NumFeatures; Delta.y = YDelta (*Start, *End) / NumFeatures; /* compute position of first pico feature */ Center.x = Start->x + Delta.x / 2.0; Center.y = Start->y + Delta.y / 2.0; /* compute each pico feature in segment and add to feature set */ for (i = 0; i < NumFeatures; i++) { Feature = NewFeature (&PicoFeatDesc); Feature->Params[PicoFeatDir] = Angle; Feature->Params[PicoFeatX] = Center.x; Feature->Params[PicoFeatY] = Center.y; AddFeature(FeatureSet, Feature); Center.x += Delta.x; Center.y += Delta.y; } } /* ConvertSegmentToPicoFeat */
/*---------------------------------------------------------------------------*/
/**
 * Call the old micro-feature extractor and copy the resulting features
 * into the new FEATURE_SET format, then deallocate the old list.
 *
 * @param Blob      blob to extract micro-features from
 * @param cn_denorm control parameter for the feature extractor
 * @return Micro-features for Blob, or NULL if extraction produced none.
 */
FEATURE_SET ExtractMicros(TBLOB* Blob, const DENORM& cn_denorm) {
  int NumFeatures;
  MICROFEATURES Features, OldFeatures;
  FEATURE_SET FeatureSet;
  FEATURE Feature;
  MICROFEATURE OldFeature;

  OldFeatures = BlobMicroFeatures(Blob, cn_denorm);
  if (OldFeatures == NULL)
    return NULL;
  NumFeatures = count (OldFeatures);
  FeatureSet = NewFeatureSet (NumFeatures);

  /* Copy each old-format micro-feature into a new FEATURE. */
  Features = OldFeatures;
  iterate(Features) {
    OldFeature = (MICROFEATURE) first_node (Features);
    Feature = NewFeature (&MicroFeatureDesc);
    Feature->Params[MFDirection] = OldFeature[ORIENTATION];
    Feature->Params[MFXPosition] = OldFeature[XPOSITION];
    Feature->Params[MFYPosition] = OldFeature[YPOSITION];
    Feature->Params[MFLength] = OldFeature[MFLENGTH];
    // Bulge features are deprecated and should not be used.  Set to 0.
    Feature->Params[MFBulge1] = 0.0f;
    Feature->Params[MFBulge2] = 0.0f;
#ifndef _WIN32
    // Assert that feature parameters are well defined.
    int i;
    for (i = 0; i < Feature->Type->NumParams; i++) {
      ASSERT_HOST(!isnan(Feature->Params[i]));
    }
#endif
    AddFeature(FeatureSet, Feature);
  }
  /* The old-format list is no longer needed once copied. */
  FreeMicroFeatures(OldFeatures);
  return FeatureSet;
}                                /* ExtractMicros */
// Build the trigram feature vector for the token at |position| and store it
// in the per-position cache.  Currently only the bias feature is emitted.
void SequenceFeatures::AddTrigramFeatures(SequenceInstanceNumeric *sentence,
                                          int position) {
  // The cache slot for this position must still be empty.
  CHECK(!input_features_trigrams_[position]) << position << " "
                                             << sentence->size();
  BinaryFeatures *features = new BinaryFeatures;
  input_features_trigrams_[position] = features;

  // All keys produced here are tagged as trigram-part features.
  uint8_t flags = 0x0 | SequenceFeatureTemplateParts::TRIGRAM;

  // Bias feature.
  uint64_t fkey =
      encoder_.CreateFKey_NONE(SequenceFeatureTemplateTrigram::BIAS, flags);
  AddFeature(fkey, features);
}
/**
 * Return the feature at zero-based index nIndex, pulling additional
 * features from the underlying OGR layer (in CACHE_SIZE batches) until
 * the cache is large enough.
 *
 * Fixes two defects in the original implementation:
 *  - if the layer ran out of features before reaching nIndex, the outer
 *    loop spun forever because the cache could no longer grow;
 *  - the final array access was unchecked and could read out of bounds.
 *
 * @param nIndex index of the requested feature
 * @return the cached feature, or NULL if the layer has fewer features
 */
OGRFeature* wxGISFeatureDataset::GetAt(int nIndex)
{
    wxASSERT(nIndex >= 0);
    while (nIndex + 1 > m_OGRFeatureArray.size())
    {
        size_t count(0);
        OGRFeature* poFeature;
        while ((count < CACHE_SIZE) &&
               (poFeature = m_poLayer->GetNextFeature()) != NULL)
        {
            AddFeature(poFeature);
            count++;
        }
        // The layer is exhausted: the cache cannot grow any further,
        // so looping again would never terminate.
        if (count == 0)
            break;
    }
    if (nIndex + 1 > m_OGRFeatureArray.size())
        return NULL;
    return m_OGRFeatureArray[nIndex];
}
/** Function for loading a map from a file **/ int CMapSP::Load(std::ifstream& file) { Clear(); int nmaps = 0; // number of maps int numbersp = 0; // number of objects int numbercp = 0; // number of control points of an object char str[20]; file.get(str, 20, ' '); file >> numbersp; int i, j; for (i = 0; i < numbersp; i++) { CBspline* spline = new CBspline; file.get(str, 20, ' '); file >> numbercp; spline->m_iNumber = numbercp; // T float value; for (j = 0; j < numbercp+4; j++) { file.get(str, 20, ' '); file >> value; spline->m_Knots.push_back(value); if (j==0) spline->m_fRange[0] = value; if (j==numbercp+3) spline->m_fRange[1] = value; } // X for (j = 0; j < numbercp; j++) { file.get(str, 20, ' '); file >> value; spline->m_Bx.push_back(value); } // Y for (j = 0; j < numbercp; j++) { file.get(str, 20, ' '); file >> value; spline->m_By.push_back(value); } AddFeature(spline); } return 0; }
/**
 * Build a feature set from the dataset, optionally restricted by a spatial
 * filter.  The whole layer is cached into m_OGRFeatureArray first (unless
 * already complete); the quad tree is then queried for the filter envelope.
 *
 * Fix: the result of CPLQuadTreeSearch is allocated with CPLMalloc and must
 * be released with CPLFree — the original `delete []` was undefined behavior.
 *
 * @param pQFilter     optional filter; only wxGISSpatialFilter is honored
 * @param pTrackCancel optional cancel tracker; NULL is returned (or the
 *                     partial set) when cancellation is requested
 * @return newly allocated feature set (caller owns), or NULL on cancel
 */
wxGISFeatureSet* wxGISFeatureDataset::GetFeatureSet(IQueryFilter* pQFilter /* = NULL */, ITrackCancel* pTrackCancel /* = NULL */)
{
    // Fill the cache if it does not yet hold the whole layer.
    if (m_OGRFeatureArray.size() < GetSize())
    {
        OGRFeature* poFeature;
        while ((poFeature = m_poLayer->GetNextFeature()) != NULL)
        {
            if (pTrackCancel && !pTrackCancel->Continue())
                return NULL;
            AddFeature(poFeature);
        }
    }

    wxGISFeatureSet* pGISFeatureSet =
        new wxGISFeatureSet(m_OGRFeatureArray.size());
    if (pQFilter)
    {
        wxGISSpatialFilter* pSpaFil =
            dynamic_cast<wxGISSpatialFilter*>(pQFilter);
        if (pSpaFil && m_pQuadTree)
        {
            int count(0);
            OGREnvelope Env = pSpaFil->GetEnvelope();
            CPLRectObj Rect = {Env.MinX, Env.MinY, Env.MaxX, Env.MaxY};
            OGRFeature** pFeatureArr =
                (OGRFeature**)CPLQuadTreeSearch(m_pQuadTree, &Rect, &count);
            for (int i = 0; i < count; i++)
            {
                if (pTrackCancel && !pTrackCancel->Continue())
                    break;
                pGISFeatureSet->AddFeature(pFeatureArr[i]);
            }
            // CPLQuadTreeSearch allocates with CPLMalloc, so release with
            // CPLFree (the original delete[] was mismatched).
            CPLFree(pFeatureArr);
        }
    }
    else
    {
        for (size_t i = 0; i < m_OGRFeatureArray.size(); i++)
        {
            if (pTrackCancel && !pTrackCancel->Continue())
                break;
            pGISFeatureSet->AddFeature(m_OGRFeatureArray[i]);
        }
    }
    return pGISFeatureSet;
}
/*---------------------------------------------------------------------------*/
/**
 * Compute a feature whose parameters describe how a character will be
 * affected by the character normalization algorithm.  The parameters are:
 * - y position of the center of mass in baseline coordinates,
 * - total outline length in baseline coordinates divided by a scale factor,
 * - radii of gyration about the center of mass in baseline coordinates.
 *
 * @param Blob      blob to extract the char norm feature from
 * @param LineStats statistics on the text row the blob is in
 * @return Character normalization feature set for Blob.
 */
FEATURE_SET ExtractCharNormFeatures(TBLOB *Blob, LINE_STATS *LineStats) {
  /* There is always exactly one char normalization feature per blob. */
  FEATURE_SET FeatureSet = NewFeatureSet(1);
  FEATURE Feature = NewFeature(&CharNormDesc);
  AddFeature(FeatureSet, Feature);

  /* Compute the normalization statistics for this blob. */
  LIST Outlines = ConvertBlob(Blob);

  INT_FEATURE_ARRAY blfeatures;
  INT_FEATURE_ARRAY cnfeatures;
  INT_FX_RESULT_STRUCT FXInfo;
  ExtractIntFeat(Blob, blfeatures, cnfeatures, &FXInfo);

  FLOAT32 Baseline = BaselineAt(LineStats, FXInfo.Xmean);
  FLOAT32 Scale = ComputeScaleFactor(LineStats);
  Feature->Params[CharNormY] = (FXInfo.Ymean - Baseline) * Scale;
  Feature->Params[CharNormLength] = FXInfo.Length * Scale / LENGTH_COMPRESSION;
  Feature->Params[CharNormRx] = FXInfo.Rx * Scale;
  Feature->Params[CharNormRy] = FXInfo.Ry * Scale;

  FreeOutlines(Outlines);
  return FeatureSet;
}                                /* ExtractCharNormFeatures */
void GlobalLexicalModelUnlimited::Evaluate(const Hypothesis& cur_hypo, ScoreComponentCollection* accumulator) const { const Sentence& input = *(m_local->input); const TargetPhrase& targetPhrase = cur_hypo.GetCurrTargetPhrase(); for(size_t targetIndex = 0; targetIndex < targetPhrase.GetSize(); targetIndex++ ) { StringPiece targetString = targetPhrase.GetWord(targetIndex).GetString(0); // TODO: change for other factors if (m_ignorePunctuation) { // check if first char is punctuation char firstChar = targetString[0]; CharHash::const_iterator charIterator = m_punctuationHash.find( firstChar ); if(charIterator != m_punctuationHash.end()) continue; } if (m_biasFeature) { stringstream feature; feature << "glm_"; feature << targetString; feature << "~"; feature << "**BIAS**"; accumulator->SparsePlusEquals(feature.str(), 1); } boost::unordered_set<uint64_t> alreadyScored; for(size_t sourceIndex = 0; sourceIndex < input.GetSize(); sourceIndex++ ) { const StringPiece sourceString = input.GetWord(sourceIndex).GetString(0); // TODO: change for other factors if (m_ignorePunctuation) { // check if first char is punctuation char firstChar = sourceString[0]; CharHash::const_iterator charIterator = m_punctuationHash.find( firstChar ); if(charIterator != m_punctuationHash.end()) continue; } const uint64_t sourceHash = util::MurmurHashNative(sourceString.data(), sourceString.size()); if ( alreadyScored.find(sourceHash) == alreadyScored.end()) { bool sourceExists, targetExists; if (!m_unrestricted) { sourceExists = FindStringPiece(m_vocabSource, sourceString ) != m_vocabSource.end(); targetExists = FindStringPiece(m_vocabTarget, targetString) != m_vocabTarget.end(); } // no feature if vocab is in use and both words are not in restricted vocabularies if (m_unrestricted || (sourceExists && targetExists)) { if (m_sourceContext) { if (sourceIndex == 0) { // add <s> trigger feature for source stringstream feature; feature << "glm_"; feature << targetString; feature << "~"; feature << 
"<s>,"; feature << sourceString; accumulator->SparsePlusEquals(feature.str(), 1); alreadyScored.insert(sourceHash); } // add source words to the right of current source word as context for(int contextIndex = sourceIndex+1; contextIndex < input.GetSize(); contextIndex++ ) { StringPiece contextString = input.GetWord(contextIndex).GetString(0); // TODO: change for other factors bool contextExists; if (!m_unrestricted) contextExists = FindStringPiece(m_vocabSource, contextString ) != m_vocabSource.end(); if (m_unrestricted || contextExists) { stringstream feature; feature << "glm_"; feature << targetString; feature << "~"; feature << sourceString; feature << ","; feature << contextString; accumulator->SparsePlusEquals(feature.str(), 1); alreadyScored.insert(sourceHash); } } } else if (m_biphrase) { // --> look backwards for constructing context int globalTargetIndex = cur_hypo.GetSize() - targetPhrase.GetSize() + targetIndex; // 1) source-target pair, trigger source word (can be discont.) and adjacent target word (bigram) StringPiece targetContext; if (globalTargetIndex > 0) targetContext = cur_hypo.GetWord(globalTargetIndex-1).GetString(0); // TODO: change for other factors else targetContext = "<s>"; if (sourceIndex == 0) { StringPiece sourceTrigger = "<s>"; AddFeature(accumulator, sourceTrigger, sourceString, targetContext, targetString); } else for(int contextIndex = sourceIndex-1; contextIndex >= 0; contextIndex-- ) { StringPiece sourceTrigger = input.GetWord(contextIndex).GetString(0); // TODO: change for other factors bool sourceTriggerExists = false; if (!m_unrestricted) sourceTriggerExists = FindStringPiece(m_vocabSource, sourceTrigger ) != m_vocabSource.end(); if (m_unrestricted || sourceTriggerExists) AddFeature(accumulator, sourceTrigger, sourceString, targetContext, targetString); } // 2) source-target pair, adjacent source word (bigram) and trigger target word (can be discont.) 
StringPiece sourceContext; if (sourceIndex-1 >= 0) sourceContext = input.GetWord(sourceIndex-1).GetString(0); // TODO: change for other factors else sourceContext = "<s>"; if (globalTargetIndex == 0) { string targetTrigger = "<s>"; AddFeature(accumulator, sourceContext, sourceString, targetTrigger, targetString); } else for(int globalContextIndex = globalTargetIndex-1; globalContextIndex >= 0; globalContextIndex-- ) { StringPiece targetTrigger = cur_hypo.GetWord(globalContextIndex).GetString(0); // TODO: change for other factors bool targetTriggerExists = false; if (!m_unrestricted) targetTriggerExists = FindStringPiece(m_vocabTarget, targetTrigger ) != m_vocabTarget.end(); if (m_unrestricted || targetTriggerExists) AddFeature(accumulator, sourceContext, sourceString, targetTrigger, targetString); } } else if (m_bitrigger) { // allow additional discont. triggers on both sides int globalTargetIndex = cur_hypo.GetSize() - targetPhrase.GetSize() + targetIndex; if (sourceIndex == 0) { StringPiece sourceTrigger = "<s>"; bool sourceTriggerExists = true; if (globalTargetIndex == 0) { string targetTrigger = "<s>"; bool targetTriggerExists = true; if (m_unrestricted || (sourceTriggerExists && targetTriggerExists)) AddFeature(accumulator, sourceTrigger, sourceString, targetTrigger, targetString); } else { // iterate backwards over target for(int globalContextIndex = globalTargetIndex-1; globalContextIndex >= 0; globalContextIndex-- ) { StringPiece targetTrigger = cur_hypo.GetWord(globalContextIndex).GetString(0); // TODO: change for other factors bool targetTriggerExists = false; if (!m_unrestricted) targetTriggerExists = FindStringPiece(m_vocabTarget, targetTrigger ) != m_vocabTarget.end(); if (m_unrestricted || (sourceTriggerExists && targetTriggerExists)) AddFeature(accumulator, sourceTrigger, sourceString, targetTrigger, targetString); } } } // iterate over both source and target else { // iterate backwards over source for(int contextIndex = sourceIndex-1; contextIndex 
>= 0; contextIndex-- ) { StringPiece sourceTrigger = input.GetWord(contextIndex).GetString(0); // TODO: change for other factors bool sourceTriggerExists = false; if (!m_unrestricted) sourceTriggerExists = FindStringPiece(m_vocabSource, sourceTrigger ) != m_vocabSource.end(); if (globalTargetIndex == 0) { string targetTrigger = "<s>"; bool targetTriggerExists = true; if (m_unrestricted || (sourceTriggerExists && targetTriggerExists)) AddFeature(accumulator, sourceTrigger, sourceString, targetTrigger, targetString); } else { // iterate backwards over target for(int globalContextIndex = globalTargetIndex-1; globalContextIndex >= 0; globalContextIndex-- ) { StringPiece targetTrigger = cur_hypo.GetWord(globalContextIndex).GetString(0); // TODO: change for other factors bool targetTriggerExists = false; if (!m_unrestricted) targetTriggerExists = FindStringPiece(m_vocabTarget, targetTrigger ) != m_vocabTarget.end(); if (m_unrestricted || (sourceTriggerExists && targetTriggerExists)) AddFeature(accumulator, sourceTrigger, sourceString, targetTrigger, targetString); } } } } } else { stringstream feature; feature << "glm_"; feature << targetString; feature << "~"; feature << sourceString; accumulator->SparsePlusEquals(feature.str(), 1); alreadyScored.insert(sourceHash); } } } } } }
// Build the unigram feature vector for the token at |position| of |sentence|
// and store it in the per-position cache: bias, lexical window (current word
// and two neighbors on each side), prefix/suffix IDs, and digit/upper/hyphen
// shape flags.
void SequenceFeatures::AddUnigramFeatures(SequenceInstanceNumeric *sentence,
                                          int position) {
  // The cache slot for this position must still be empty.
  CHECK(!input_features_unigrams_[position]);
  BinaryFeatures *features = new BinaryFeatures;
  input_features_unigrams_[position] = features;

  int sentence_length = sentence->size();

  // Array of form IDs.
  const vector<int>* word_ids = &sentence->GetFormIds();

  // Words.
  uint16_t WID = (*word_ids)[position]; // Current word.
  // Word on the left (sentence-boundary token when at the start).
  uint16_t pWID = (position > 0)? (*word_ids)[position - 1] : TOKEN_START;
  // Word on the right (sentence-boundary token when at the end).
  uint16_t nWID = (position < sentence_length - 1)?
      (*word_ids)[position + 1] : TOKEN_STOP;
  // Word two positions on the left.
  uint16_t ppWID = (position > 1)? (*word_ids)[position - 2] : TOKEN_START;
  // Word two positions on the right.
  uint16_t nnWID = (position < sentence_length - 2)?
      (*word_ids)[position + 2] : TOKEN_STOP;

  // Prefixes/Suffixes, one ID per length (0xffff = "not present").
  vector<uint16_t> AID(sentence->GetMaxPrefixLength(position), 0xffff);
  vector<uint16_t> ZID(sentence->GetMaxSuffixLength(position), 0xffff);
  for (int l = 0; l < AID.size(); ++l) {
    AID[l] = sentence->GetPrefixId(position, l+1);
  }
  for (int l = 0; l < ZID.size(); ++l) {
    ZID[l] = sentence->GetSuffixId(position, l+1);
  }

  // Several flags.
  uint8_t flag_digit = sentence->HasDigit(position)? 0x1 : 0x0;
  // NOTE(review): the extra "position > 0" guard disables the uppercase flag
  // for the sentence-initial token — presumably to ignore ordinary sentence
  // capitalization; confirm this is intended.
  uint8_t flag_upper = position > 0 && sentence->HasUpper(position)? 0x1 : 0x0;
  uint8_t flag_hyphen = sentence->HasHyphen(position)? 0x1 : 0x0;

  // Pack each flag as: low nibble = flag identifier (0/1/2),
  // bit 4 = the flag's boolean value, so the three flags never collide.
  flag_digit = 0x0 | (flag_digit << 4);
  flag_upper = 0x1 | (flag_upper << 4);
  flag_hyphen = 0x2 | (flag_hyphen << 4);

  uint64_t fkey;
  uint8_t flags = 0x0;
  flags |= SequenceFeatureTemplateParts::UNIGRAM;

  // Maximum is 255 feature templates.
  CHECK_LT(SequenceFeatureTemplateUnigram::COUNT, 256);

  // Bias feature.
  fkey = encoder_.CreateFKey_NONE(SequenceFeatureTemplateUnigram::BIAS, flags);
  AddFeature(fkey, features);

  // Lexical features.
  fkey = encoder_.CreateFKey_W(SequenceFeatureTemplateUnigram::W, flags, WID);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_W(SequenceFeatureTemplateUnigram::pW, flags, pWID);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_W(SequenceFeatureTemplateUnigram::nW, flags, nWID);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_W(SequenceFeatureTemplateUnigram::ppW, flags, ppWID);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_W(SequenceFeatureTemplateUnigram::nnW, flags, nnWID);
  AddFeature(fkey, features);

  // Prefix/Suffix features, parameterized by the (zero-based) length.
  for (int l = 0; l < AID.size(); ++l) {
    uint8_t flag_prefix_length = l;
    fkey = encoder_.CreateFKey_WP(SequenceFeatureTemplateUnigram::A, flags,
                                  AID[l], flag_prefix_length);
    AddFeature(fkey, features);
  }
  for (int l = 0; l < ZID.size(); ++l) {
    uint8_t flag_suffix_length = l;
    fkey = encoder_.CreateFKey_WP(SequenceFeatureTemplateUnigram::Z, flags,
                                  ZID[l], flag_suffix_length);
    AddFeature(fkey, features);
  }

  // Several flags.
  fkey = encoder_.CreateFKey_P(SequenceFeatureTemplateUnigram::FLAG, flags, flag_digit);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_P(SequenceFeatureTemplateUnigram::FLAG, flags, flag_upper);
  AddFeature(fkey, features);
  fkey = encoder_.CreateFKey_P(SequenceFeatureTemplateUnigram::FLAG, flags, flag_hyphen);
  AddFeature(fkey, features);
}
/**
 * Translate the previously parsed GeoJSON object tree into an OGR layer.
 * Handles a bare geometry, a single Feature, or a FeatureCollection; any
 * other root object is an error.  If the document declares no spatial
 * reference, WGS84 (EPSG:4326) is assumed.
 *
 * Fixes over the original: the "parset" typo in the debug message, and the
 * duplicated SetSpatialRef/delete branches collapsed into one path.
 *
 * @param pszName layer name to create
 * @param poDS    owning data source
 * @return the new layer (also stored in poLayer_), or NULL on failure
 */
OGRGeoJSONLayer* OGRGeoJSONReader::ReadLayer( const char* pszName,
                                              OGRGeoJSONDataSource* poDS )
{
    CPLAssert( NULL == poLayer_ );

    if( NULL == poGJObject_ )
    {
        CPLDebug( "GeoJSON",
                  "Missing parsed GeoJSON data. Forgot to call Parse()?" );
        return NULL;
    }

    poLayer_ = new OGRGeoJSONLayer( pszName, NULL,
                                    OGRGeoJSONLayer::DefaultGeometryType,
                                    NULL, poDS );

    if( !GenerateLayerDefn() )
    {
        CPLError( CE_Failure, CPLE_AppDefined,
                  "Layer schema generation failed." );
        delete poLayer_;
        return NULL;
    }

/* -------------------------------------------------------------------- */
/*      Translate single geometry-only Feature object.                  */
/* -------------------------------------------------------------------- */
    GeoJSONObject::Type objType = OGRGeoJSONGetType( poGJObject_ );
    if( GeoJSONObject::ePoint == objType
        || GeoJSONObject::eMultiPoint == objType
        || GeoJSONObject::eLineString == objType
        || GeoJSONObject::eMultiLineString == objType
        || GeoJSONObject::ePolygon == objType
        || GeoJSONObject::eMultiPolygon == objType
        || GeoJSONObject::eGeometryCollection == objType )
    {
        OGRGeometry* poGeometry = ReadGeometry( poGJObject_ );
        if( !AddFeature( poGeometry ) )
        {
            CPLDebug( "GeoJSON", "Translation of single geometry failed." );
            delete poLayer_;
            return NULL;
        }
    }
/* -------------------------------------------------------------------- */
/*      Translate single but complete Feature object.                   */
/* -------------------------------------------------------------------- */
    else if( GeoJSONObject::eFeature == objType )
    {
        OGRFeature* poFeature = ReadFeature( poGJObject_ );
        if( !AddFeature( poFeature ) )
        {
            CPLDebug( "GeoJSON", "Translation of single feature failed." );
            delete poLayer_;
            return NULL;
        }
    }
/* -------------------------------------------------------------------- */
/*      Translate multi-feature FeatureCollection object.               */
/* -------------------------------------------------------------------- */
    else if( GeoJSONObject::eFeatureCollection == objType )
    {
        OGRGeoJSONLayer* poThisLayer = ReadFeatureCollection( poGJObject_ );
        CPLAssert( poLayer_ == poThisLayer );
    }
    else
    {
        CPLError( CE_Failure, CPLE_AppDefined,
                  "Unrecognized GeoJSON structure." );
        delete poLayer_;
        return NULL;
    }

    OGRSpatialReference* poSRS = OGRGeoJSONReadSpatialReference( poGJObject_ );
    if (poSRS == NULL )
    {
        // If there is none defined, we use 4326 (may stay NULL if the
        // EPSG import fails).
        poSRS = new OGRSpatialReference();
        if( OGRERR_NONE != poSRS->importFromEPSG( 4326 ) )
        {
            delete poSRS;
            poSRS = NULL;
        }
    }
    // NOTE(review): SetSpatialRef appears to copy/reference the SRS rather
    // than take ownership (the original also deleted right after) — confirm.
    poLayer_->SetSpatialRef( poSRS );
    delete poSRS;

    // TODO: FeatureCollection

    return poLayer_;
}
// A paddle starts at rest; its features give it a size (Dimension) and
// collision response (Collidable).
Paddle::Paddle()
    : _velocity(0.f)
{
    AddFeature(new Dimension());
    AddFeature(new Collidable(this));
}
/**
 * Import the rows of a CSV file into an existing feature class.
 *
 * Opens the CSV file through the "CsvFile" data engine, opens the target
 * feature class in the user's workspace, verifies that the field schemas
 * match, and then inserts every CSV row as a feature.
 *
 * Fix: the result of OpenDataSet() is now NULL-checked before use (the
 * original dereferenced it unconditionally).
 *
 * @return AG_SUCCESS on success, AG_FAILURE otherwise
 */
RESULTCODE CsvImportProcessorImpl::Execute()
{
    GError* pError = augeGetErrorInstance();
    GLogger* pLogger = augeGetLoggerInstance();

    char csvDrv[AUGE_DRV_MAX];
    char csvDir[AUGE_PATH_MAX];
    char csvName[AUGE_NAME_MAX];
    char csvPath[AUGE_PATH_MAX];
    char constr[AUGE_PATH_MAX];
    memset(csvDrv, 0, AUGE_DRV_MAX);
    memset(csvDir, 0, AUGE_PATH_MAX);
    memset(csvName, 0, AUGE_NAME_MAX);
    memset(csvPath, 0, AUGE_PATH_MAX);
    memset(constr, 0, AUGE_PATH_MAX);

    // The CSV engine connects to the directory; the file name selects
    // the data set inside it.
    auge_split_path(m_csv_path.c_str(), csvDrv, csvDir, csvName, NULL);
    auge_make_path(csvPath, csvDrv, csvDir, NULL, NULL);
    g_sprintf(constr, "DATABASE=%s", csvPath);

    DataEngineManager* pDataEngineManager = augeGetDataEngineManagerInstance();
    DataEngine* pDataEngine = pDataEngineManager->GetEngine("CsvFile");
    if (pDataEngine == NULL)
    {
        pLogger->Error(pError->GetLastError(), __FILE__, __LINE__);
        return AG_FAILURE;
    }

    Workspace* pcsvWorkspace = pDataEngine->CreateWorkspace();
    pcsvWorkspace->SetConnectionString(constr);
    RESULTCODE rc = pcsvWorkspace->Open();
    if (rc != AG_SUCCESS)
    {
        pLogger->Error(pError->GetLastError(), __FILE__, __LINE__);
        pcsvWorkspace->Release();
        return AG_FAILURE;
    }

    ConnectionManager* pConnectionManager = augeGetConnectionManagerInstance();
    FeatureWorkspace* pobjWorkspace = dynamic_cast<FeatureWorkspace*>(
        pConnectionManager->GetWorkspace(m_user, m_source_name.c_str()));
    if (pobjWorkspace == NULL)
    {
        pLogger->Error(pError->GetLastError(), __FILE__, __LINE__);
        pcsvWorkspace->Release();
        return AG_FAILURE;
    }

    FeatureClass* pFeatureClass =
        pobjWorkspace->OpenFeatureClass(m_dataset_name.c_str());
    if (pFeatureClass == NULL)
    {
        pLogger->Error(pError->GetLastError(), __FILE__, __LINE__);
        pcsvWorkspace->Release();
        return AG_FAILURE;
    }

    AttributeDataSet* pcsvDataset =
        static_cast<AttributeDataSet*>(pcsvWorkspace->OpenDataSet(csvName));
    // FIX: OpenDataSet can fail; the original dereferenced a NULL pointer.
    if (pcsvDataset == NULL)
    {
        pLogger->Error(pError->GetLastError(), __FILE__, __LINE__);
        pFeatureClass->Release();
        pcsvWorkspace->Release();
        return AG_FAILURE;
    }

    GFields* pcsvFields = pcsvDataset->GetFields();
    GFields* pobjFields = pFeatureClass->GetFields();
    if (!IsMatch(pcsvFields, pobjFields))
    {
        const char* msg = "Field is not matched";
        pError->SetError(msg);
        pLogger->Error(msg, __FILE__, __LINE__);
        pFeatureClass->Release();
        pcsvDataset->Release();
        pcsvWorkspace->Release();
        return AG_FAILURE;
    }

    // Insert every CSV row as a feature of the target class.
    FeatureInsertCommand* cmd = pFeatureClass->CreateInsertCommand();
    Row* pRow = NULL;
    Cursor* pCursor = pcsvDataset->GetRows();
    while ((pRow = pCursor->NextRow()) != NULL)
    {
        AddFeature(pRow, pFeatureClass, cmd);
        pRow->Release();
    }
    cmd->Commit();
    cmd->Release();
    pCursor->Release();

    pFeatureClass->Refresh();
    pFeatureClass->Release();
    pcsvDataset->Release();
    pcsvWorkspace->Release();
    return AG_SUCCESS;
}
// "About" dialog: builds the rich-text license/credits blurb and the
// version/copyright labels from the WM_* build-time constants.
About::About(QWidget *parent)
    : QDialog(parent)
{
    ppl6::CString Tmp,DateFormat,Date;
    ui.setupUi(this);
    ppl6::Cppl6Core *core=ppl6::PPLInit();
    this->setWindowTitle(tr("About WinMusik"));
    //ui.note->setText(tr("This program makes use of the following Open Source Libraries:"));

    // License section followed by the credits section, assembled as HTML.
    Tmp="<div align=\"center\" style=\"font-size: 12pt;\"><b>"+tr("License:")+"</b></div>\n";
    Tmp.Concatf("%s<p>\n",WM_COPYRIGHT);
    Tmp+=tr("GPL3 short");
    Tmp+="<div align=\"center\" style=\"font-size: 12pt;\"><b>"+tr("Credits:")+"</b></div>\n";
    Tmp+=tr("This program makes use of the following Open Source Libraries:");
    Tmp.Concat("<ul>");
    Tmp.Concatf("<li>PPL %s, %s</li>\n",(const char*)core->GetVersion(), (const char*)core->GetCopyright());
    // The Qt list item stays open: AddFeature appends the PPL feature
    // libraries to it, and the "</li>" below closes it.
    Tmp.Concatf("<li>Qt %s, Copyright (C) 2012 Digia Plc",qVersion());
    AddFeature(Tmp,"zlib");
    AddFeature(Tmp,"bzip2");
    AddFeature(Tmp,"pcre");
    AddFeature(Tmp,"iconv");
    //AddFeature(Tmp,"lame");
    AddFeature(Tmp,"openssl");
    AddFeature(Tmp,"curl");
    //AddFeature(Tmp,"libmad");
    Tmp.Concat("</li>\n");
    Tmp.Concat("</ul>\n");
    Tmp+=tr("Some graphics and icons are made with Lightwave 3D 9.6 from NewTek and Paint Shop Pro 7.0 from Jasc Software. ");
    Tmp+=" ";
    Tmp+=tr("Some icons are taken from the Crystal Project from Everaldo.com.");
    ui.libs->setTextFormat(Qt::RichText);
    ui.libs->setText(Tmp);

    // Version line: substitute version, build and release date placeholders.
    Tmp=tr("WinMusik Version %v% build %b% vom %d%");
    Tmp.Replace("%v%",WM_VERSION);
    Tmp.Replace("%b%",WM_SVN_REVISION);
    DateFormat=tr("%d.%m.%Y");
    // WM_RELEASEDATE is packed as YYYYMMDD; unpack into components.
    int a,year,month,day;
    a=WM_RELEASEDATE;
    year=a/10000;
    a=a-year*10000;
    month=a/100;
    day=a-month*100;
    ppluint64 t=ppl6::MkTime(year,month,day);
    ppl6::MkDate(Date,DateFormat,t);
    Tmp.Replace("%d%",(const char*)Date);
    ui.wmVersion->setText(Tmp);
    Tmp=WM_COPYRIGHT;
    ui.wmCopyright->setText(Tmp);
}
// A ball starts with its default speed and no heading; its features give it
// a size (Dimension) and collision response (Collidable).
Ball::Ball()
    : SPEED_INCREMENT(20.f),
      OSCILLATION_FUDGE(20.f),
      _heading(0.f),
      _speed(230.f)
{
    AddFeature(new Dimension());
    AddFeature(new Collidable(this));
}
void OGRILI1Layer::PolygonizeAreaLayer() { if (poAreaLineLayer == 0) return; //add all lines from poAreaLineLayer to collection OGRGeometryCollection *gc = new OGRGeometryCollection(); poAreaLineLayer->ResetReading(); while (OGRFeature *feature = poAreaLineLayer->GetNextFeatureRef()) gc->addGeometry(feature->GetGeometryRef()); //polygonize lines CPLDebug( "OGR_ILI", "Polygonizing layer %s with %d multilines", poAreaLineLayer->GetLayerDefn()->GetName(), gc->getNumGeometries()); OGRMultiPolygon* polys = Polygonize( gc , false); CPLDebug( "OGR_ILI", "Resulting polygons: %d", polys->getNumGeometries()); if (polys->getNumGeometries() != poAreaReferenceLayer->GetFeatureCount()) { CPLDebug( "OGR_ILI", "Feature count of layer %s: %d", poAreaReferenceLayer->GetLayerDefn()->GetName(), GetFeatureCount()); CPLDebug( "OGR_ILI", "Polygonizing again with crossing line fix"); delete polys; polys = Polygonize( gc, true ); //try again with crossing line fix } delete gc; //associate polygon feature with data row according to centroid #if defined(HAVE_GEOS) int i; OGRPolygon emptyPoly; GEOSGeom *ahInGeoms = NULL; CPLDebug( "OGR_ILI", "Associating layer %s with area polygons", GetLayerDefn()->GetName()); ahInGeoms = (GEOSGeom *) CPLCalloc(sizeof(void*),polys->getNumGeometries()); for( i = 0; i < polys->getNumGeometries(); i++ ) { ahInGeoms[i] = polys->getGeometryRef(i)->exportToGEOS(); if (!GEOSisValid(ahInGeoms[i])) ahInGeoms[i] = NULL; } poAreaReferenceLayer->ResetReading(); while (OGRFeature *feature = poAreaReferenceLayer->GetNextFeatureRef()) { GEOSGeom point = (GEOSGeom)feature->GetGeometryRef()->exportToGEOS(); for (i = 0; i < polys->getNumGeometries(); i++ ) { if (ahInGeoms[i] && GEOSWithin(point, ahInGeoms[i])) { OGRFeature* areaFeature = feature->Clone(); areaFeature->SetGeometry( polys->getGeometryRef(i) ); AddFeature(areaFeature); break; } } if (i == polys->getNumGeometries()) { CPLDebug( "OGR_ILI", "Association between area and point failed."); feature->SetGeometry( 
&emptyPoly ); } GEOSGeom_destroy( point ); } for( i = 0; i < polys->getNumGeometries(); i++ ) GEOSGeom_destroy( ahInGeoms[i] ); CPLFree( ahInGeoms ); #endif poAreaReferenceLayer = 0; poAreaLineLayer = 0; }
SMFMap::SMFMap(std::string smfname)
{
    /*
    ** Load a Spring .smf map: header, metal map, height map, type map,
    ** minimap (DXT1), the main texture (stitched from 32x32 tiles stored in
    ** external .smt files) and the feature list.
    ** Throws: CannotLoadSmfFileException  - .smf could not be opened
    **         InvalidSmfFileException     - bad .smf magic
    **         CannotOpenSmtFileException  - referenced .smt missing
    **         InvalidSmtFileException     - bad .smt magic
    ** NOTE(review): fread return values are still unchecked; a truncated
    ** file yields garbage rather than an exception - TODO harden.
    */
    std::vector<ILuint> tiles_images;
    std::vector<std::string> tile_files;
    metalmap = NULL;
    heightmap = NULL;
    typemap = NULL;
    minimap = NULL;
    vegetationmap = NULL;
    m_tiles = NULL;
    FILE * smffile = fopen(smfname.c_str(),"rb");
    if ( !smffile )
    {
        throw CannotLoadSmfFileException();
    }
    SMFHeader hdr;
    fread(&hdr,sizeof(hdr),1,smffile);
    // BUGFIX: was 'strncmp(...) > 0', which accepted any magic comparing
    // lexicographically *below* "spring map file". Any mismatch is invalid.
    if ( strncmp(hdr.magic,"spring map file",15) != 0 )
    {
        fclose(smffile);
        throw InvalidSmfFileException();
    }
    mapx = hdr.mapx;
    mapy = hdr.mapy;
    m_minh = hdr.minHeight;
    m_maxh = hdr.maxHeight;
    m_smfname = smfname;
    m_doclamp = true;
    m_th = 0;
    m_comptype = COMPRESS_REASONABLE;
    m_smooth = false;
    // Main texture: 1024x1024 texels per 128 map units.
    texture = new Image();
    texture->AllocateRGBA((mapx/128)*1024,(mapy/128)*1024);
    std::cout << "Loading metal map..." << std::endl;
    metalmap = new Image();
    metalmap->AllocateLUM(mapx/2,mapy/2);
    fseek(smffile,hdr.metalmapPtr,SEEK_SET);
    fread(metalmap->datapointer,mapx/2*mapy/2,1,smffile);
    std::cout << "Loading heightmap..." << std::endl;
    heightmap = new Image();
    heightmap->AllocateLUM(mapx+1,mapy+1);
    heightmap->ConvertToLUMHDR();//TODO: Allocate directly HDR
    fseek(smffile,hdr.heightmapPtr,SEEK_SET);
    fread(heightmap->datapointer,(mapx+1)*(mapy+1)*2,1,smffile); // 16-bit samples
    heightmap->FlipVertical();
    std::cout << "Loading type map..." << std::endl;
    typemap = new Image();
    typemap->AllocateLUM(mapx/2,mapy/2);
    fseek(smffile,hdr.typeMapPtr,SEEK_SET);
    fread(typemap->datapointer,mapx/2*mapy/2,1,smffile);
    typemap->FlipVertical();
    std::cout << "Loading minimap..." << std::endl;
    minimap = new Image();
    // 699064 bytes: 1024x1024 DXT1 minimap incl. mip chain. The buffer is
    // reused below for the 680-byte SMT tiles.
    uint8_t * dxt1data = new uint8_t[699064];
    fseek(smffile,hdr.minimapPtr,SEEK_SET);
    fread(dxt1data,699064,1,smffile);
    ilBindImage(minimap->image);
    ilTexImageDxtc(1024,1024,1,IL_DXT1,dxt1data);
    ilDxtcDataToImage();
    std::cout << "Extracting main texture..." << std::endl;
    int *tilematrix = new int[mapx/4 * mapy/4];
    fseek(smffile,hdr.tilesPtr,SEEK_SET);
    MapTileHeader thdr;
    fread(&thdr,sizeof(thdr),1,smffile);
    // Read the list of .smt file names: each entry is a 32-bit tile count
    // followed by a NUL-terminated file name.
    while ( tile_files.size() < thdr.numTileFiles )
    {
        tile_files.push_back("");
        char byte;
        int numtiles;
        fread(&numtiles,4,1,smffile);
        fread(&byte,1,1,smffile);
        while ( byte != 0 )
        {
            tile_files[tile_files.size()-1].append(1,byte);
            fread(&byte,1,1,smffile);
        }
    }
    for ( std::vector<std::string>::iterator it = tile_files.begin(); it != tile_files.end(); it++ )
    {
        std::cout << "Opening " << *it << std::endl;
        FILE* smtfile = fopen((*it).c_str(),"rb");
        if ( !smtfile )
        {
            fclose(smffile);
            delete [] tilematrix;
            delete [] dxt1data; // BUGFIX: buffer was leaked on this error path
            throw CannotOpenSmtFileException();
        }
        TileFileHeader smthdr;
        fread(&smthdr,sizeof(smthdr),1,smtfile);
        // BUGFIX: compare all 15 characters of "spring tilefile" (was 14),
        // and free dxt1data before throwing (was leaked).
        if ( strncmp(smthdr.magic,"spring tilefile",15) != 0 )
        {
            fclose(smffile);
            fclose(smtfile);
            delete [] tilematrix;
            delete [] dxt1data;
            throw InvalidSmtFileException();
        }
        // Each tile: 32x32 DXT1 with mips = 680 bytes.
        for ( int i = 0; i < smthdr.numTiles; i++ )
        {
            ILuint tile = ilGenImage();
            fread(dxt1data,680,1,smtfile);
            ilBindImage(tile);
            ilTexImageDxtc(32,32,1,IL_DXT1,dxt1data);
            ilDxtcDataToImage();
            tiles_images.push_back(tile);
        }
        fclose(smtfile);
    }
    std::cout << "Tiles @ " << ftell(smffile) << std::endl;
    // One 32-bit tile index per 4x4 map square.
    fread(tilematrix,mapx/4 * mapy/4 * 4,1,smffile);
    ilBindImage(texture->image);
    unsigned int * texdata = (unsigned int *)ilGetData();
    std::cout << "Blitting tiles..." << std::endl;
    for ( int y = 0; y < mapy/4; y++ )
    {
        std::cout << "Row " << y << " of " << mapy/4 << std::endl;
        for ( int x = 0; x < mapx/4; x++ )
        {
            if ( tilematrix[y*(mapx/4)+x] >= tiles_images.size() )
            {
                std::cerr << "Warning: tile " << tilematrix[y*(mapx/4)+x] << " out of range" << std::endl;
                continue;
            }
            //ilBlit(tiles_images[tilematrix[y*(mapx/4)+x]],x*32,y*32,0,0,0,0,32,32,1);
            ilBindImage(tiles_images[tilematrix[y*(mapx/4)+x]]);
            unsigned int * data = (unsigned int *)ilGetData();
            int r2 = 0;
            for ( int y2 = y*32; y2 < y*32+32; y2++ )//FAST blitting
            {
                // Copy one 32-pixel RGBA row of the tile into the big texture.
                memcpy(&texdata[y2*texture->w+x*32],&data[r2*32],32*4);
                r2++;
            }
        }
    }
    texture->FlipVertical();
    std::cout << "Loading features..." << std::endl;
    fseek(smffile,hdr.featurePtr,SEEK_SET);
    MapFeatureHeader mfhdr;
    fread(&mfhdr,sizeof(mfhdr),1,smffile);
    // Feature type names: numFeatureType NUL-terminated strings.
    std::vector<std::string> feature_types;
    while ( feature_types.size() < mfhdr.numFeatureType )
    {
        feature_types.push_back("");
        char byte;
        fread(&byte,1,1,smffile);
        while ( byte != 0 )
        {
            feature_types[feature_types.size()-1].append(1,byte);
            fread(&byte,1,1,smffile);
        }
    }
    for ( int i = 0; i < mfhdr.numFeatures; i++ )
    {
        MapFeatureStruct f;
        fread(&f,sizeof(f),1,smffile);
        if ( f.featureType >= feature_types.size() )
        {
            std::cerr << "Warning: invalid feature type " << f.featureType << std::endl;
            continue;
        }
        // rotation stored as 0..65535; map into degrees.
        AddFeature(feature_types[f.featureType],f.xpos,f.ypos,f.zpos,-32767.0f+f.rotation/65535.0f*360);
    }
    fclose(smffile);
    delete [] dxt1data;
    delete [] tilematrix;
}
bool wxGISFeatureDataset::Open(int iLayer) { if(m_bIsOpened) return true; wxCriticalSectionLocker locker(m_CritSect); m_poDS = OGRSFDriverRegistrar::Open( wgWX2MB(m_sPath.c_str()), FALSE ); if ( m_poDS == NULL) { const char* err = CPLGetLastErrorMsg(); wxString sErr = wxString::Format(_("wxGISFeatureDataset: Open failed! Path '%s'. OGR error: %s"), m_sPath.c_str(), wgMB2WX(err)); wxLogError(sErr); wxMessageBox(sErr, _("Error"), wxOK | wxICON_ERROR); return false; } m_poLayer = m_poDS->GetLayer(iLayer); if (m_poLayer) { m_psExtent = new OGREnvelope(); if (m_poLayer->GetExtent(m_psExtent, true) != OGRERR_NONE) { wxDELETE(m_psExtent); m_psExtent = NULL; } bool bOLCFastSpatialFilter = m_poLayer->TestCapability(OLCFastSpatialFilter); if (!bOLCFastSpatialFilter) { if (m_psExtent) { OGREnvelope Env = *m_psExtent; CPLRectObj Rect = {Env.MinX, Env.MinY, Env.MaxX, Env.MaxY}; m_pQuadTree = CPLQuadTreeCreate(&Rect, GetFeatureBoundsFunc); } // // // } // // m_OGRFeatureArray.reserve(m_poLayer->GetFeatureCount(true)); size_t count(0); OGRFeature *poFeature; while ( (count < CACHE_SIZE) && ((poFeature = m_poLayer->GetNextFeature()) != NULL) ) { AddFeature(poFeature); count++; } // // // } m_bIsOpened = true; return true; }
// Imports spatial contexts and feature schemas from XML to datastore. void FdoImportExportTest::Import( FdoIConnection* connection, FdoIoStream* stream, FdoXmlSpatialContextFlags* flags, FdoBoolean importSC, FdoBoolean importSchemas ) { FdoPtr<FdoITransaction> featureTransaction; stream->Reset(); // Import the Spatial Contexts if ( importSC ) { FdoXmlSpatialContextSerializer::XmlDeserialize( connection, FdoXmlSpatialContextReaderP( FdoXmlSpatialContextReader::Create( FdoXmlReaderP( FdoXmlReader::Create(stream) ), flags ) ), flags ); stream->Reset(); } // Import the Schema Override sets. FdoSchemaMappingsP mappings = FdoPhysicalSchemaMappingCollection::Create(); stream->Reset(); mappings->ReadXml( stream ); // Import the Feature Schemas if ( importSchemas ) { FdoFeatureSchemasP schemas = FdoFeatureSchemaCollection::Create(NULL); // Deserialize the feature schemas stream->Reset(); schemas->ReadXml( stream ); // Add each feature schema to the datastore. FdoPtr<FdoIApplySchema> applyCmd = (FdoIApplySchema*) connection->CreateCommand(FdoCommandType_ApplySchema); for ( int idx = 0; idx < schemas->GetCount(); idx++ ) { FdoFeatureSchemaP schema = schemas->GetItem(idx); applyCmd->SetFeatureSchema( schema ); FdoPhysicalSchemaMappingP overrides = (FdoPhysicalSchemaMapping*) mappings->GetItem( connection, schema->GetName() ); if ( overrides ) applyCmd->SetPhysicalMapping( overrides ); applyCmd->Execute(); } FdoFeatureSchemaP insSchema = schemas->FindItem( L"Schema1" ); if ( insSchema ) { FdoClassDefinitionP insClass = FdoClassesP(insSchema->GetClasses())->FindItem( L"ClassB1" ); if ( insClass ) { featureTransaction = connection->BeginTransaction(); FdoPtr<FdoIInsert> insertCommand = (FdoIInsert *) connection->CreateCommand(FdoCommandType_Insert); insertCommand->SetFeatureClassName(insClass->GetQualifiedName()); AddFeature( insertCommand, 1 ); AddFeature( insertCommand, 2 ); AddFeature( insertCommand, 3 ); featureTransaction->Commit(); } } } }
bool wxGISFeatureDataset::Open(int iLayer) { if(m_bIsOpened) return true; wxCriticalSectionLocker locker(m_CritSect); m_poDS = OGRSFDriverRegistrar::Open( wgWX2MB(m_sPath.c_str()), FALSE ); if( m_poDS == NULL ) { const char* err = CPLGetLastErrorMsg(); wxString sErr = wxString::Format(_("wxGISFeatureDataset: Open failed! Path '%s'. OGR error: %s"), m_sPath.c_str(), wgMB2WX(err)); wxLogError(sErr); wxMessageBox(sErr, _("Error"), wxOK | wxICON_ERROR); return false; } m_poLayer = m_poDS->GetLayer(iLayer); if(m_poLayer) { m_psExtent = new OGREnvelope(); if(m_poLayer->GetExtent(m_psExtent, true) != OGRERR_NONE) { wxDELETE(m_psExtent); m_psExtent = NULL; } bool bOLCFastSpatialFilter = m_poLayer->TestCapability(OLCFastSpatialFilter); if(!bOLCFastSpatialFilter) { if(m_psExtent) { OGREnvelope Env = *m_psExtent; CPLRectObj Rect = {Env.MinX, Env.MinY, Env.MaxX, Env.MaxY}; m_pQuadTree = CPLQuadTreeCreate(&Rect, GetFeatureBoundsFunc); } //wxFileName FileName(m_sPath); //wxString SQLStatement = wxString::Format(wxT("CREATE SPATIAL INDEX ON %s"), FileName.GetName().c_str()); //m_poDS->ExecuteSQL(wgWX2MB(SQLStatement), NULL, NULL); } // bool bOLCRandomRead = pOGRLayer->TestCapability(OLCRandomRead); // bool bOLCSequentialWrite = pOGRLayer->TestCapability(OLCSequentialWrite); // bool bOLCRandomWrite = pOGRLayer->TestCapability(OLCRandomWrite); // bool bOLCFastFeatureCount = pOGRLayer->TestCapability(OLCFastFeatureCount); // bool bOLCFastGetExtent = pOGRLayer->TestCapability(OLCFastGetExtent); // bool bOLCFastSetNextByIndex= pOGRLayer->TestCapability(OLCFastSetNextByIndex); // bool bOLCCreateField = pOGRLayer->TestCapability(OLCCreateField); // bool bOLCDeleteFeature = pOGRLayer->TestCapability(OLCDeleteFeature); // bool bOLCStringsAsUTF8 = pOGRLayer->TestCapability(OLCStringsAsUTF8); // bool bOLCTransactions = pOGRLayer->TestCapability(OLCTransactions); // wxString sFIDColName = wgMB2WX(pOGRLayer->GetFIDColumn()); // m_pGISFeatureSet = new IwxGISFeatureSet(m_poLayer); 
m_OGRFeatureArray.reserve(m_poLayer->GetFeatureCount(true)); size_t count(0); OGRFeature *poFeature; while( (count < CACHE_SIZE) && ((poFeature = m_poLayer->GetNextFeature()) != NULL) ) { AddFeature(poFeature); count++; } //OGRFeature *poFeature; //while( (poFeature = m_poLayer->GetNextFeature()) != NULL ) // m_pGISFeatureSet->AddFeature(poFeature); } m_bIsOpened = true; return true; }