void Pipe::MakeSupportedParameters() {
  Parts *parts = CreateParts();
  Features *features = CreateFeatures();
  vector<double> gold_outputs;

  LOG(INFO) << "Building supported feature set...";

  dictionary_->StopGrowth();
  parameters_->AllowGrowth();
  for (int i = 0; i < instances_.size(); i++) {
    Instance *instance = instances_[i];
    MakeParts(instance, parts, &gold_outputs);
    vector<bool> selected_parts(gold_outputs.size(), false);
    for (int r = 0; r < gold_outputs.size(); ++r) {
      if (gold_outputs[r] > 0.5) {
        selected_parts[r] = true;
      }
    }
    MakeSelectedFeatures(instance, parts, selected_parts, features);
    TouchParameters(parts, features, selected_parts);
  }
  delete parts;
  delete features;
  parameters_->StopGrowth();

  LOG(INFO) << "Number of Features: " << parameters_->Size();
}
void Pipe::Run() {
  Parts *parts = CreateParts();
  Features *features = CreateFeatures();
  vector<double> scores;
  vector<double> gold_outputs;
  vector<double> predicted_outputs;

  timeval start, end;
  gettimeofday(&start, NULL);

  if (options_->evaluate()) BeginEvaluation();

  reader_->Open(options_->GetTestFilePath());
  writer_->Open(options_->GetOutputFilePath());

  int num_instances = 0;
  Instance *instance = reader_->GetNext();
  while (instance) {
    Instance *formatted_instance = GetFormattedInstance(instance);

    MakeParts(formatted_instance, parts, &gold_outputs);
    MakeFeatures(formatted_instance, parts, features);
    ComputeScores(formatted_instance, parts, features, &scores);
    decoder_->Decode(formatted_instance, parts, scores, &predicted_outputs);

    Instance *output_instance = instance->Copy();
    LabelInstance(parts, predicted_outputs, output_instance);

    if (options_->evaluate()) {
      EvaluateInstance(instance, output_instance,
                       parts, gold_outputs, predicted_outputs);
    }

    writer_->Write(output_instance);

    if (formatted_instance != instance) delete formatted_instance;
    delete output_instance;
    delete instance;

    instance = reader_->GetNext();
    ++num_instances;
  }

  delete parts;
  delete features;

  writer_->Close();
  reader_->Close();

  gettimeofday(&end, NULL);
  LOG(INFO) << "Number of instances: " << num_instances;
  LOG(INFO) << "Time: " << diff_ms(end, start);

  if (options_->evaluate()) EndEvaluation();
}
// Do not modify the geometry, only correct features/ident ---------------
bool UpdateFeatures (HPROJECT hPr, LONG &rlONr, ULONG lIdent, long sta[], long mi[])
{
// find the object again
char *pUIdent = NULL;

	rlONr = 0L;
	TX_ASSERT(!(g_fAbglGeom && g_fAbglAttr));	// never reconcile attributes and geometry at the same time!

// try to find it via the GUID
	if ('\0' != guid[0]) {
		OBJECTGUID OUID;

		INITSTRUCT (OUID, OBJECTGUID);
		if (SUCCEEDED(CLSIDFromString (WideString(guid), &OUID.guid))) {
			if (SUCCEEDED(DEX_FindObjectFromGuidEx (hPr, OUID)))
				rlONr = OUID.lONr;
		}
	}

// try to find it via the unique ident
	if (0 == rlONr && RetrieveText (g_ulUniqueId, mi, &pUIdent, false, !g_fImportObj) > 0)
		rlONr = FindObject (g_ulUniqueId, pUIdent);
	DELETE_OBJ(pUIdent);

	if (rlONr > 0) {
	// create the features
		if (m_flag && !CreateFeatures (rlONr, mi, lIdent)) {
			g_lWithErrors++;		// imported with errors
			return false;
		}

		ULONG lOldId = (ULONG)DEX_GetObjIdent (rlONr);
		if (lIdent != lOldId) {
			MODOBJIDENT MOI;

			INITSTRUCT(MOI, MODOBJIDENT);
			MOI.lONr = rlONr;
			MOI.lNewIdent = lIdent;
			MOI.iMode = 0;
			if (!DEX_ModObjectIdent (MOI)) {
				g_lWithErrors++;	// imported with errors
				return false;
			}
		}
		CountObjects (sta[1]);
		return true;
	} else
		g_lNotImported++;			// object was not found again

	return false;
}
Filterbank* CreateSPFilterBank(int ncols, int nrows, int scales, int orientations, int nfeats)
{
  if(!isPowerOf2(ncols) || !isPowerOf2(nrows))
  {
    Error("The Steerable Pyramid filterbank must have rows and cols that are powers of 2","CreateSPFilterBank");
  }
  if(scales == 0)
  {
    Error("The scales must be > 0","CreateSPFilterBank");
  }
  if(orientations == 0)
  {
    Error("The orientations must be > 0","CreateSPFilterBank");
  }
  if(nfeats < scales*orientations)
  {
    Error("The total number of feats should be at least scales*orientations","CreateSPFilterBank");
  }

  Filterbank* bank = (Filterbank*)calloc(1,sizeof(Filterbank));
  if(bank == NULL) Error(MSG1, "Filterbank");

  int size = scales*orientations;

  bank->m_pSpectralFilterBank = NULL;
  bank->m_pSpectralFilterBank = (Spectrum**)calloc(size,sizeof(Spectrum*));

  bank->m_pFeatures = NULL;
  bank->m_pFeatures = CreateFeatures(ncols,nrows,nfeats);

  bank->orientations = orientations;
  bank->scales = scales;

  int s,k;
  int width = ncols;
  int height = nrows;

  /* One band-pass filter per (scale, orientation); the spatial support is
   * halved at every scale. */
  for(s = 0; s < scales; s++)
  {
    for(k = 0; k < orientations; k++)
    {
      Spectrum* filter = CreateBandPass(width,height,k+1,orientations);
      bank->m_pSpectralFilterBank[s*orientations + k] = filter;
    }
    width = width/2;
    height = height/2;
  }

  return bank;
}
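/* A hypothetical cleanup helper (not part of the original sources) that mirrors
 * the teardown done at the end of SteerablePyramidFeats later in this section;
 * it assumes the Filterbank was built by CreateSPFilterBank above. */
void DestroySPFilterBank(Filterbank** ppBank)
{
  Filterbank* bank = *ppBank;
  int i;
  for (i = 0; i < bank->scales * bank->orientations; i++)
  {
    DestroySpectrum(&bank->m_pSpectralFilterBank[i]);
  }
  free(bank->m_pSpectralFilterBank);
  DestroyFeatures(&bank->m_pFeatures);
  free(bank);
  *ppBank = NULL;
}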
void applyFilterbank( Filterbank *pFilterbank, Features * pFeats )
{
  assert(pFilterbank!=NULL);
  assert(pFeats!=NULL);

  int gabfeatsidx = 0;

  /* One output band per (input feature, scale, orientation).  Note that any
   * Features object previously stored in m_pFeatures is overwritten here. */
  pFilterbank->m_pFeatures = CreateFeatures(pFeats->ncols, pFeats->nrows,
                                            pFeats->nfeats*NSCALES*NORIENTATIONS);
  assert(pFilterbank->m_pFeatures!=NULL);

  register int idx, s, k;
  for(idx=0; idx < pFeats->nfeats; idx++)
  {
    DImage* pImgfeats = GetFeature(pFeats, idx);
    Spectrum *pSpectrumImg = NULL;
    pSpectrumImg = DFFT2D(pImgfeats);
    assert(pSpectrumImg!=NULL);

    for( s = 0; s < NSCALES; s++ )
    {
      for( k = 0; k < NORIENTATIONS; k++ )
      {
        /* Filter in the frequency domain, transform back to the spatial
         * domain, and center the result. */
        Spectrum * pSpectrumConv = MultSpectrum(pSpectrumImg,
                                                pFilterbank->m_pSpectralFilterBank[s*NORIENTATIONS+k]);
        DImage * pInvImg = DInvFFT2D(pSpectrumConv);
        DImage * pShiftedInvImg = DShift(pInvImg);

        SetFeature(pFilterbank->m_pFeatures, gabfeatsidx, pShiftedInvImg);
        gabfeatsidx++;

        DestroySpectrum(&(pSpectrumConv));
        DestroyDImage(&(pInvImg));
        DestroyDImage(&(pShiftedInvImg));
      }
    }
    DestroyDImage(&(pImgfeats));
    DestroySpectrum(&(pSpectrumImg));
  }
}
bool CreateObject (LONG &rlONr, ULONG lIdent, long sta[], long mi[], long lgi, double gxi[], double gyi[])
{
	HPROJECT hPr = DEX_GetDataSourceHandle();
	_ASSERTE(NULL != hPr);

	if (sta[0] == K_OBJEKT)
		return r_flag ? CreateKomplexObject (hPr, rlONr, lIdent, mi) : true;

// create the object or find it again, write the geometry
	long lONr = 0L;
	char *pUIdent = NULL;

	TX_ASSERT(!(g_fAbglGeom && g_fAbglAttr));	// never reconcile attributes and geometry at the same time!

	if (!m_flag || g_fAbglOks || g_fAbglGeom || g_fAbglAttr || 0 != g_ulOksToAttr) {
	// try to find it via the GUID
		if ('\0' != guid[0]) {
			OBJECTGUID OUID;

			INITSTRUCT (OUID, OBJECTGUID);
			if (SUCCEEDED(CLSIDFromString (WideString(guid), &OUID.guid))) {
				if (SUCCEEDED(DEX_FindObjectFromGuid (OUID)))
					lONr = OUID.lONr;
			}
		}

	// try to find it via the unique ident
		if (0 == lONr && RetrieveText (g_ulUniqueId, mi, &pUIdent, false, !g_fImportObj) > 0)
			lONr = FindObject (g_ulUniqueId, pUIdent);
	}
	DELETE_OBJ(pUIdent);

	TEXTGEOMETRIEEX TG;
	OBJGEOMETRIE *pOG = (OBJGEOMETRIE *)&TG;
	ULONG lIgnore = 0L;
	int iFlags = OGForceGeometry|OGNoMemoryObject|OGConverted;
	bool fRet = true;

	if (0 == lONr) {		// not found again
	// create the object from scratch
		iFlags |= OGNewObject;
		fRet = ImportGeometry (hPr, pOG, sta, lIdent, gxi, gyi, iFlags, rlONr, lIgnore);
	} else {
	// perform the reconciliation
		if (g_fAbglGeom) {
		// reconcile the geometry
			iFlags |= OGModObject;
			fRet = ImportGeometry (hPr, pOG, sta, lIdent, gxi, gyi, iFlags, rlONr, lIgnore);
		} else {
		// write out the previous Oks (class key)
			if (0 != g_ulOksToAttr) {
				long lOldIdent = DEX_GetObjIdent(lONr);
				char cbOKS[MAX_OKS_LENX+1];

				if (S_OK == ClassFromIdentX (lOldIdent, cbOKS))
					WriteTextMerkmal (lONr, (ULONG)g_ulOksToAttr, cbOKS, strlen(cbOKS));
			}

		// reconcile the Oks
			if (g_fAbglOks) {
				if (0 == g_ulOksFromAttr) {
				// take the Oks from the input Oks
					MODOBJIDENT MOI;

					INITSTRUCT(MOI, MODOBJIDENT);
					MOI.lONr = lONr;
					MOI.lNewIdent = lIdent;
					MOI.iMode = GetMOIMode (sta);
					DEX_ModObjectIdent(MOI);
				} else {
				// use the Oks from the input attribute
					char *pOks = NULL;

					if (RetrieveText (g_ulOksFromAttr, mi, &pOks, false, !g_fImportObj) > 0) {
					// get the ident from the Oks
						long lNewIdent = 0;
						HRESULT hr = IdentFromClassX (hPr, pOks, (ULONG *)&lNewIdent);

						if (S_FALSE == hr) {
							ErrCode RC = IdentIsDefined(lNewIdent);

							if (RC != EC_OKAY && !DEX_isIdentUsedEx(hPr, lNewIdent))
							// create it anew if no objects with this ident exist
								hr = IdentFromClassX (hPr, pOks, (ULONG *)&lNewIdent, true);
						}
						if (0 != lNewIdent) {
						// now modify the ident
							MODOBJIDENT MOI;

							INITSTRUCT(MOI, MODOBJIDENT);
							MOI.lONr = lONr;
							MOI.lNewIdent = lNewIdent;
							MOI.iMode = GetMOIMode (sta);
							DEX_ModObjectIdent(MOI);
							lIdent = lNewIdent;
						}
					}
					DELETE_OBJ(pOks);
				}
			}

		// possibly still create a new object after all
			if (g_fImportObj) {
				iFlags |= OGNewObject;
				fRet = ImportGeometry (hPr, pOG, sta, lIdent, gxi, gyi, iFlags, rlONr, lIgnore);
			} else
				rlONr = lONr;
		}
	}

	if (!fRet) {
		if (0 == pOG->lONr) {
			g_lNotImported++;
			return false;
		} else {
			if (pOG->iFlags & OGObjectHasGeometryErrors)
				g_lWithErrors++;	// imported with errors
		}
	}
	CountObjects (sta[1]);			// count the objects

// create the features
	if (m_flag || g_fAbglAttr)
		return CreateFeatures (rlONr, mi, lIdent, lIgnore);

	return true;
}
void Pipe::TrainEpoch(int epoch) {
  Instance *instance;
  Parts *parts = CreateParts();
  Features *features = CreateFeatures();
  vector<double> scores;
  vector<double> gold_outputs;
  vector<double> predicted_outputs;
  double total_cost = 0.0;
  double total_loss = 0.0;
  double eta;
  int num_instances = instances_.size();
  double lambda = 1.0/(options_->GetRegularizationConstant() *
                       (static_cast<double>(num_instances)));
  timeval start, end;
  gettimeofday(&start, NULL);
  int time_decoding = 0;
  int time_scores = 0;
  int num_mistakes = 0;

  LOG(INFO) << " Iteration #" << epoch + 1;

  dictionary_->StopGrowth();

  for (int i = 0; i < instances_.size(); i++) {
    int t = num_instances * epoch + i;
    instance = instances_[i];
    MakeParts(instance, parts, &gold_outputs);
    MakeFeatures(instance, parts, features);

    // If using only supported features, must remove the unsupported ones.
    // This is necessary not to mess up the computation of the squared norm
    // of the feature difference vector in MIRA.
    if (options_->only_supported_features()) {
      RemoveUnsupportedFeatures(instance, parts, features);
    }

    timeval start_scores, end_scores;
    gettimeofday(&start_scores, NULL);
    ComputeScores(instance, parts, features, &scores);
    gettimeofday(&end_scores, NULL);
    time_scores += diff_ms(end_scores, start_scores);

    if (options_->GetTrainingAlgorithm() == "perceptron" ||
        options_->GetTrainingAlgorithm() == "mira") {
      timeval start_decoding, end_decoding;
      gettimeofday(&start_decoding, NULL);
      decoder_->Decode(instance, parts, scores, &predicted_outputs);
      gettimeofday(&end_decoding, NULL);
      time_decoding += diff_ms(end_decoding, start_decoding);

      if (options_->GetTrainingAlgorithm() == "perceptron") {
        for (int r = 0; r < parts->size(); ++r) {
          if (!NEARLY_EQ_TOL(gold_outputs[r], predicted_outputs[r], 1e-6)) {
            ++num_mistakes;
          }
        }
        eta = 1.0;
      } else {
        CHECK(false) << "Plain mira is not implemented yet.";
      }

      MakeGradientStep(parts, features, eta, t, gold_outputs,
                       predicted_outputs);
    } else if (options_->GetTrainingAlgorithm() == "svm_mira" ||
               options_->GetTrainingAlgorithm() == "crf_mira" ||
               options_->GetTrainingAlgorithm() == "svm_sgd" ||
               options_->GetTrainingAlgorithm() == "crf_sgd") {
      double loss;
      timeval start_decoding, end_decoding;
      gettimeofday(&start_decoding, NULL);
      if (options_->GetTrainingAlgorithm() == "svm_mira" ||
          options_->GetTrainingAlgorithm() == "svm_sgd") {
        // Do cost-augmented inference.
        double cost;
        decoder_->DecodeCostAugmented(instance, parts, scores, gold_outputs,
                                      &predicted_outputs, &cost, &loss);
        total_cost += cost;
      } else {
        // Do marginal inference.
        double entropy;
        decoder_->DecodeMarginals(instance, parts, scores, gold_outputs,
                                  &predicted_outputs, &entropy, &loss);
        CHECK_GE(entropy, 0.0);
      }
      gettimeofday(&end_decoding, NULL);
      time_decoding += diff_ms(end_decoding, start_decoding);

      if (loss < 0.0) {
        if (!NEARLY_EQ_TOL(loss, 0.0, 1e-9)) {
          LOG(INFO) << "Warning: negative loss set to zero: " << loss;
        }
        loss = 0.0;
      }
      total_loss += loss;

      // Compute difference between predicted and gold feature vectors.
      FeatureVector difference;
      MakeFeatureDifference(parts, features, gold_outputs, predicted_outputs,
                            &difference);

      // Get the stepsize.
      if (options_->GetTrainingAlgorithm() == "svm_mira" ||
          options_->GetTrainingAlgorithm() == "crf_mira") {
        double squared_norm = difference.GetSquaredNorm();
        double threshold = 1e-9;
        if (loss < threshold || squared_norm < threshold) {
          eta = 0.0;
        } else {
          eta = loss / squared_norm;
          if (eta > options_->GetRegularizationConstant()) {
            eta = options_->GetRegularizationConstant();
          }
        }
      } else {
        if (options_->GetLearningRateSchedule() == "fixed") {
          eta = options_->GetInitialLearningRate();
        } else if (options_->GetLearningRateSchedule() == "invsqrt") {
          eta = options_->GetInitialLearningRate() /
            sqrt(static_cast<double>(t+1));
        } else if (options_->GetLearningRateSchedule() == "inv") {
          eta = options_->GetInitialLearningRate() /
            static_cast<double>(t+1);
        } else if (options_->GetLearningRateSchedule() == "lecun") {
          eta = options_->GetInitialLearningRate() /
            (1.0 + (static_cast<double>(t) /
                    static_cast<double>(num_instances)));
        } else {
          CHECK(false) << "Unknown learning rate schedule: "
                       << options_->GetLearningRateSchedule();
        }

        // Scale the parameter vector (only for SGD).
        double decay = 1 - eta * lambda;
        CHECK_GT(decay, 0.0);
        parameters_->Scale(decay);
      }

      MakeGradientStep(parts, features, eta, t, gold_outputs,
                       predicted_outputs);
    } else {
      CHECK(false) << "Unknown algorithm: "
                   << options_->GetTrainingAlgorithm();
    }
  }

  // Compute the regularization value (halved squared L2 norm of the weights).
  double regularization_value =
    lambda * static_cast<double>(num_instances) *
    parameters_->GetSquaredNorm() / 2.0;

  delete parts;
  delete features;

  gettimeofday(&end, NULL);
  LOG(INFO) << "Time: " << diff_ms(end, start);
  LOG(INFO) << "Time to score: " << time_scores;
  LOG(INFO) << "Time to decode: " << time_decoding;
  LOG(INFO) << "Number of Features: " << parameters_->Size();
  if (options_->GetTrainingAlgorithm() == "perceptron" ||
      options_->GetTrainingAlgorithm() == "mira") {
    LOG(INFO) << "Number of mistakes: " << num_mistakes;
  }
  LOG(INFO) << "Total Cost: " << total_cost << "\t"
            << "Total Loss: " << total_loss << "\t"
            << "Total Reg: " << regularization_value << "\t"
            << "Total Loss+Reg: " << total_loss + regularization_value
            << endl;
}
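// A compact reading of the step sizes computed in TrainEpoch above (this
// summary paraphrases the code; it is not a comment from the original source).
// For svm_mira/crf_mira, with C the regularization constant:
//
//   eta = min(C, loss / ||f(x, y_gold) - f(x, y_pred)||^2)
//
// For svm_sgd/crf_sgd, eta follows one of four schedules with initial rate
// eta0 and N = number of training instances:
//
//   fixed:    eta = eta0
//   invsqrt:  eta = eta0 / sqrt(t + 1)
//   inv:      eta = eta0 / (t + 1)
//   lecun:    eta = eta0 / (1 + t / N)
//
// and the weights are scaled by (1 - eta * lambda), with lambda = 1 / (C * N),
// before the gradient step.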
Features* SteerablePyramidFeats(Features* feats)
{
  if(feats == NULL)
  {
    Error("Features null","SteerablePyramidFeats");
  }
  if(feats->nfeats == 0)
  {
    Error("There must be at least one feat. Features->nfeats == 0!!","SteerablePyramidFeats");
  }

  int i,j;
  bool fillwzeroes = false;
  int nfeats = feats->nfeats;
  DImage** featsArray = (DImage**)calloc(1,sizeof(DImage*)*feats->nfeats);

  /// Getting all image Features and turning them into power-of-2 sided DImages
  for( i = 0; i < nfeats; i++)
  {
    DImage* feat = GetFeature(feats,i);
    if(!isPowerOf2(feats->ncols) || !isPowerOf2(feats->nrows))
    {
      featsArray[i] = DImagePower2(feat);
      DestroyDImage(&feat);
      fillwzeroes = true;
    }
    else
    {
      featsArray[i] = feat;
    }
  }

  int index = 0;
  int ncols = featsArray[0]->ncols;
  int nrows = featsArray[0]->nrows;
  int totalnfeats = nfeats*SPSCALES*SPORIENTATIONS;

  Filterbank* bank = CreateSPFilterBank(ncols, nrows, SPSCALES, SPORIENTATIONS, totalnfeats);

  for( i = 0; i < nfeats; i++)
  {
    ApplySPFilterBank(featsArray[i], bank, index);
    /** Updates the index where the features should be placed in
     *  bank->m_pFeatures, i.e. 0, 24, 48, etc. **/
    index += SPSCALES*SPORIENTATIONS;
    DestroyDImage (&featsArray[i]);
  }
  free(featsArray);

  Features* result = NULL;
  /// If the images were padded with zeroes, remove the padding again
  if(fillwzeroes)
  {
    result = RemoveZeroes(bank->m_pFeatures,feats->ncols, feats->nrows);
  }
  else
  {
    result = CreateFeatures(ncols,nrows,totalnfeats);
    for (i = 0; i < result->nelems; i++)
      for (j = 0; j < result->nfeats; j++)
        result->elem[i].feat[j] = bank->m_pFeatures->elem[i].feat[j];
  }

  // destroy the filter bank
  for (i = 0; i < bank->scales*bank->orientations; i++)
  {
    DestroySpectrum(&bank->m_pSpectralFilterBank[i]);
  }
  free(bank->m_pSpectralFilterBank);
  DestroyFeatures(&bank->m_pFeatures);
  free(bank);

  return result;
}
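/* A minimal usage sketch (not from the original sources): wrap an input image
 * as a single-band Features object, expand it with the steerable pyramid, and
 * hand the result back.  ExtractSPDescriptors is a hypothetical helper name;
 * CreateFeatures, SetFeature and DestroyFeatures are assumed to behave as in
 * the snippets above. */
Features* ExtractSPDescriptors(DImage* img)
{
  Features* feats = CreateFeatures(img->ncols, img->nrows, 1);
  SetFeature(feats, 0, img);                          /* the image goes into band 0 */
  Features* spfeats = SteerablePyramidFeats(feats);   /* 1 * SPSCALES * SPORIENTATIONS bands */
  DestroyFeatures(&feats);
  return spfeats;                                     /* caller releases it with DestroyFeatures */
}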