void CRegionalSvmModel::SetOptions( const string& strOptions, int id )
{
    ASSERT( id>=0 && id<GetOutputs() );

    if( m_vcOptions.size()<GetOutputs() )
        m_vcOptions.resize( GetOutputs() );

    m_vcOptions[id] = strOptions;
}
CMetaModel* CRegionalSvmModel::CreateMetaModel()
{
    CSvmModel* pModel = new CSvmModel();
    pModel->SetInputs( GetInputs() );
    pModel->SetOutputs( GetOutputs() );

    for( int i=0; i<GetOutputs(); i++ )
        pModel->SetOptions( m_vcOptions[i], i );

    return pModel;
}
REAL CMetaModel::Test( const REAL* prInputs, const REAL* prOutputs, int count )
{
    vector<REAL> vcOutputs( GetOutputs() );

    //calculate the error function under the current weights
    REAL rError = 0;
    for( int i=0; i<count; i++ ){
        Predict( prInputs + i*GetInputs(), &vcOutputs[0] );

        //mean squared error of this sample; advance the expected-output
        //pointer by one row per sample, just like the input pointer
        rError += inner_product( vcOutputs.begin(), vcOutputs.end(), prOutputs + i*GetOutputs(), 0.0,
            plus<REAL>(), minus_pow<REAL>(2.0) ) / GetOutputs();
    }
    return rError/count;
}
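// minus_pow is used by Test() above but is not defined in this section. The
// call implies a binary functor returning |a - b|^p for a configurable
// exponent p; the following is only a minimal sketch consistent with that
// use, not the project's actual definition.
template <class T>
struct minus_pow : public std::binary_function<T, T, T>
{
    T m_rPow;
    explicit minus_pow( T rPow ) : m_rPow( rPow ) {}

    T operator()( const T& a, const T& b ) const
    {
        //squared (or p-th power) difference of predicted vs. expected value
        return (T)pow( fabs( a - b ), m_rPow );
    }
};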
void CSvmModel::Train( const string& strTrainData, const string& strValidData )
{
    Release();

    //train one SVM per output dimension
    for( int i=0; i<GetOutputs(); i++ ){
        //extract the i-th output column and rewrite the data in svm format
        TransformToSvmData( strTrainData, GetInputs(), GetOutputs(), i, SVM_TRAIN_FILE );

        string strModelFile = CreateUniqueModelName();
        RunSvmTrain( SVM_TRAIN_FILE, /*parameters*/GetOptions(i), strModelFile );
        m_vcModelFiles.push_back( strModelFile );

        SVM_MODEL* model = svm_load_model( strModelFile.c_str() );
        m_vcModels.push_back( model );
    }
}
void CSvmModel::Predict( const REAL* prInputs, REAL* prOutputs )
{
    //write the inputs into a temporary test file
/*
    {
        ofstream ofg(TEST_FILE);
        vector<REAL> vcInputs( prInputs, prInputs+GetInputs() );
        vector<REAL> vcOutputs;
        TransformSvmLine( ofg, vcInputs, vcOutputs, 0 );
    }
*/

    //predict for each model and put each output into prOutputs[i]
    for( int i=0; i<GetOutputs(); i++ ){
        svm_predict( m_vcModels[i], prInputs, GetInputs(), &prOutputs[i] );
//      RunSvmPredict( TEST_FILE, m_vcModelFiles[i], prOutputs[i] );
    }
}
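// The four-argument svm_predict call above appears to be a project wrapper
// around libsvm, whose stock API is svm_predict(const svm_model*, const
// svm_node*). A sketch of what such a wrapper could do -- pack the dense
// inputs into libsvm's sparse svm_node format -- is shown below; the function
// name PredictDense is hypothetical.
static double PredictDense( const svm_model* pModel, const REAL* prInputs, int nInputs )
{
    vector<svm_node> vcNodes( nInputs + 1 );

    for( int i=0; i<nInputs; i++ ){
        vcNodes[i].index = i + 1;           //libsvm feature indices are 1-based
        vcNodes[i].value = prInputs[i];
    }
    vcNodes[nInputs].index = -1;            //terminator node required by libsvm

    return svm_predict( pModel, &vcNodes[0] );
}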
void CRegionalMetaModel::Train( const string& strTrainData, const string& strValidData )
{
    int nInputs = GetInputs();
    int nOutputs = GetOutputs();

    vector< vector<REAL> > vcInputs;
    vector< vector<REAL> > vcOutputs;
    vector< REAL > vcPtCen = m_vcPtCen;
    dist_pair_vector vcDists;
    vector< vector<int> > vcIdSet;

    Release();

    //read all the data into vcInputs and vcOutputs.
    ReadTrainingData( strTrainData, nInputs, nOutputs, vcInputs, vcOutputs );

    //compute the distance of each point to the center point.
    ComputeDistance( vcInputs, vcPtCen, vcDists );

    //subdivide the training data into clusters.
    SubdividTrainingData( vcDists, vcIdSet, m_nMinCutPts );

    //create the training set for each hierarchy.
    for( int i=0; i<vcIdSet.size(); i++ ){
        sort( vcIdSet[i].begin(), vcIdSet[i].end() );

        //write a training set to files and run the matlab trainer
        WriteTrainingFile( REG_TRAIN_FILE, REG_VALID_FILE, vcInputs, vcOutputs, vcIdSet[i] );

        CMetaModel* pModel = CreateMetaModel();
        pModel->Train( REG_TRAIN_FILE, REG_VALID_FILE );

        CTrustRegion* pRegion = new CTrustRegion();
        ComputeTrustRegion( pRegion, vcInputs, vcIdSet[i] );

        m_vcMetaModels.push_back( pModel );
        pRegion->SetModelId( m_vcMetaModels.size()-1 );
        m_vcRegions.push_back( pRegion );
    }

    cdump<<"training finished: "<<vcIdSet.size()<<" nets were trained!"<<endl;
}
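// ComputeDistance is called above but not shown in this section. The sketch
// below only illustrates what the call implies -- the Euclidean distance of
// every training point to the center, paired with the point's index. The
// free-function name and the assumption that dist_pair_vector holds
// (distance, index) pairs are illustrative, not the project's definition.
static void ComputeDistanceSketch( const vector< vector<REAL> >& vcInputs,
    const vector<REAL>& vcPtCen, dist_pair_vector& vcDists )
{
    vcDists.clear();
    for( int i=0; i<(int)vcInputs.size(); i++ ){
        REAL rDistSqr = 0;
        for( int j=0; j<(int)vcPtCen.size(); j++ ){
            REAL rDiff = vcInputs[i][j] - vcPtCen[j];
            rDistSqr += rDiff*rDiff;
        }
        vcDists.push_back( make_pair( (REAL)sqrt( rDistSqr ), i ) );
    }
}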
///////////////////////////////////////////////////////////////////////////////
// Process the configuration settings.  This will set up the recordCopier.
// Note: Check HaveConfigurationError() for the result.
///////////////////////////////////////////////////////////////////////////////
void GeoLoadCombineAddressRange::ProcessConfiguration()
{
    if (!configChanged) {
        return;
    }

    ::ProcessConfiguration();

    // Output record starts out empty
    outputRecord = new Record;

    // Make a new record copier
    recordCopier = new RecordCopier;

    // Must have an output
    DataSourceList outputs = GetOutputs();
    if (outputs.size() == 0) {
        ConfigError("Must have at least one output attached");
    }

    // Get references to all inputs.
    DataSourceRef input = GetFirstInput();
    if (input == 0) {
        ConfigError("Must have at least one input attached");
        return;
    }

    // Output is always a copy of the input record schema
    outputRecord = new Record(*input->GetRecord());

    // Copy entire record; this is only very slightly wasteful of CPU.
    recordCopier->AddRecordTransfers(outputRecord);

    // Configuration processing.  Walk the DataItem hierarchy and
    // transform that into the data-file, file format, and record layout.
    DataItemRef tmp;

    ///////////////////////////////////////////////////////////////////////////////
    // Specified fields
    ///////////////////////////////////////////////////////////////////////////////
    postcodeFieldName = "";
    tlidFieldName = "";
    leftRightFieldName = "";
    fraddrFieldName = "";
    toaddrFieldName = "";

    tmp = config["ZIP"];
    if (tmp != 0) {
        postcodeFieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(postcodeFieldName) == 0) {
        ConfigError("ZIP field '" + postcodeFieldName + "' does not exist on input record");
    }

    tmp = config["TLID"];
    if (tmp != 0) {
        tlidFieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(tlidFieldName) == 0) {
        ConfigError("TLID field '" + tlidFieldName + "' does not exist on input record");
    }

    tmp = config["LEFTRIGHT"];
    if (tmp != 0) {
        leftRightFieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(leftRightFieldName) == 0) {
        ConfigError("LEFTRIGHT field '" + leftRightFieldName + "' does not exist on input record");
    }

    tmp = config["FRADDR"];
    if (tmp != 0) {
        fraddrFieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(fraddrFieldName) == 0) {
        ConfigError("FRADDR field '" + fraddrFieldName + "' does not exist on input record");
    }

    tmp = config["TOADDR"];
    if (tmp != 0) {
        toaddrFieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(toaddrFieldName) == 0) {
        ConfigError("TOADDR field '" + toaddrFieldName + "' does not exist on input record");
    }
}
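// The five field lookups above repeat the same lookup-and-validate pattern.
// The helper below is NOT part of the original class; it is only a sketch of
// how that pattern could be factored out, assuming the config, outputRecord,
// and ConfigError members shown above and that TsString is constructible
// from a C string.
TsString GeoLoadCombineAddressRange::GetRequiredFieldName(const char* configKey)
{
    TsString fieldName = "";

    DataItemRef tmp = config[configKey];
    if (tmp != 0) {
        fieldName = TsString(*tmp);
    }
    if (outputRecord->GetField(fieldName) == 0) {
        ConfigError(TsString(configKey) + " field '" + fieldName + "' does not exist on input record");
    }
    return fieldName;
}

// Usage, replacing the repeated blocks above (illustrative only):
//   postcodeFieldName  = GetRequiredFieldName("ZIP");
//   tlidFieldName      = GetRequiredFieldName("TLID");
//   leftRightFieldName = GetRequiredFieldName("LEFTRIGHT");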
void NewMode(int selectedmode)
{
    struct UnitNode *unit = NULL;
    struct ModeNode *mode = NULL;
    ULONG id = AHI_INVALID_ID;
    Fixed MinOutVol = 0, MaxOutVol = 0, MinMonVol = 0, MaxMonVol = 0;
    Fixed MinGain = 0, MaxGain = 0;
    double Min, Max, Current;
    int offset;

    state.ModeSelected = selectedmode;

    unit = (struct UnitNode *) GetNode(state.UnitSelected, UnitList);

    if( selectedmode != ~0 ) {
        mode = (struct ModeNode *) GetNode(selectedmode, ModeList);
    }

    if( mode != NULL ) {
        id = mode->ID;

        AHI_GetAudioAttrs(id, NULL,
            AHIDB_IndexArg,         unit->prefs.ahiup_Frequency,
            AHIDB_Index,            (ULONG) &state.FreqSelected,
            AHIDB_Frequencies,      (ULONG) &state.Frequencies,
            AHIDB_MaxChannels,      (ULONG) &state.Channels,
            AHIDB_Inputs,           (ULONG) &state.Inputs,
            AHIDB_Outputs,          (ULONG) &state.Outputs,
            AHIDB_MinOutputVolume,  (ULONG) &MinOutVol,
            AHIDB_MaxOutputVolume,  (ULONG) &MaxOutVol,
            AHIDB_MinMonitorVolume, (ULONG) &MinMonVol,
            AHIDB_MaxMonitorVolume, (ULONG) &MaxMonVol,
            AHIDB_MinInputGain,     (ULONG) &MinGain,
            AHIDB_MaxInputGain,     (ULONG) &MaxGain,
            AHIDB_BufferLen,        128,
            AHIDB_Author,           (ULONG) authorBuffer,
            AHIDB_Copyright,        (ULONG) copyrightBuffer,
            AHIDB_Driver,           (ULONG) driverBuffer,
            AHIDB_Version,          (ULONG) versionBuffer,
            AHIDB_Annotation,       (ULONG) annotationBuffer,
            TAG_DONE);
    }

    state.ChannelsSelected  = unit->prefs.ahiup_Channels;
    state.ScaleModeSelected = unit->prefs.ahiup_ScaleMode;
    state.InputSelected     = unit->prefs.ahiup_Input;
    state.OutputSelected    = unit->prefs.ahiup_Output;

    // Limit channels
    state.Channels = min(state.Channels, 32);

    if(unit->prefs.ahiup_Unit == AHI_NO_UNIT) {
        state.ChannelsDisabled = TRUE;
    }
    else {
        state.ChannelsDisabled = FALSE;
    }

    if(MinOutVol == 0) {
        MinOutVol = 1;
        state.OutVolMute = TRUE;
        state.OutVols    = 1;
    }
    else {
        state.OutVolMute = FALSE;
        state.OutVols    = 0;
    }

    if(MinMonVol == 0) {
        MinMonVol = 1;
        state.MonVolMute = TRUE;
        state.MonVols    = 1;
    }
    else {
        state.MonVolMute = FALSE;
        state.MonVols    = 0;
    }

    if(MinGain == 0) {
        MinGain = 1;
        state.GainMute = TRUE;
        state.Gains    = 1;
    }
    else {
        state.GainMute = FALSE;
        state.Gains    = 0;
    }

    if(MaxOutVol == 0) {
        state.OutVolSelected = 0;
        state.OutVolOffset   = 0;
    }
    else {
        Current = 20 * log10( unit->prefs.ahiup_OutputVolume / 65536.0 );
        Min     = floor(20 * log10( MinOutVol / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;
        Max     = floor(20 * log10( MaxOutVol / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;

        state.OutVolSelected = (Current - Min) / DBSTEP + 0.5 + state.OutVols;
        state.OutVols       += ((Max - Min) / DBSTEP) + 1;
        state.OutVolOffset   = Min;
    }

    if(MaxMonVol == 0) {
        state.MonVolSelected = 0;
        state.MonVolOffset   = 0;
    }
    else {
        Current = 20 * log10( unit->prefs.ahiup_MonitorVolume / 65536.0 );
        Min     = floor(20 * log10( MinMonVol / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;
        Max     = floor(20 * log10( MaxMonVol / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;

        state.MonVolSelected = (Current - Min) / DBSTEP + 0.5 + state.MonVols;
        state.MonVols       += ((Max - Min) / DBSTEP) + 1;
        state.MonVolOffset   = Min;
    }

    if(MaxGain == 0) {
        state.GainSelected = 0;
        state.GainOffset   = 0;
    }
    else {
        Current = 20 * log10( unit->prefs.ahiup_InputGain / 65536.0 );
        Min     = floor(20 * log10( MinGain / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;
        Max     = floor(20 * log10( MaxGain / 65536.0 ) / DBSTEP + 0.5) * DBSTEP;

        state.GainSelected = (Current - Min) / DBSTEP + 0.5 + state.Gains;
        state.Gains       += ((Max - Min) / DBSTEP) + 1;
        state.GainOffset   = Min;
    }

    // Make sure everything is within bounds!
    state.FreqSelected      = max(state.FreqSelected, 0);
    state.FreqSelected      = min(state.FreqSelected, state.Frequencies);

    state.ChannelsSelected  = max(state.ChannelsSelected, 1);
    state.ChannelsSelected  = min(state.ChannelsSelected, state.Channels);

    state.ScaleModeSelected = max(state.ScaleModeSelected, 0);
    state.ScaleModeSelected = min(state.ScaleModeSelected, AHI_SCALE_FIXED_6_DB);

    state.OutVolSelected    = max(state.OutVolSelected, 0);
    state.OutVolSelected    = min(state.OutVolSelected, state.OutVols);

    state.MonVolSelected    = max(state.MonVolSelected, 0);
    state.MonVolSelected    = min(state.MonVolSelected, state.MonVols);

    state.GainSelected      = max(state.GainSelected, 0);
    state.GainSelected      = min(state.GainSelected, state.Gains);

    state.InputSelected     = max(state.InputSelected, 0);
    state.InputSelected     = min(state.InputSelected, state.Inputs);

    state.OutputSelected    = max(state.OutputSelected, 0);
    state.OutputSelected    = min(state.OutputSelected, state.Outputs);

    // Remove any \r's or \n's from version string
    offset = strlen(versionBuffer);
    while((offset > 0) &&
          ((versionBuffer[offset-1] == '\r') || (versionBuffer[offset-1] == '\n'))) {
        versionBuffer[offset-1] = '\0';
        offset--;
    }

    FreeVec(Inputs);
    FreeVec(Outputs);
    Inputs  = GetInputs(id);
    Outputs = GetOutputs(id);
}
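// The three volume/gain branches above repeat the same Fixed(16.16)-to-dB
// slider mapping. The helpers below are NOT part of the original source; they
// are only a sketch of that arithmetic. DbRange, MakeDbRange and
// DbSliderIndex are hypothetical names; DBSTEP is the same dB step size used
// above, and math.h supplies log10() and floor().
struct DbRange {
    double min;     /* rounded minimum level in dB         */
    double max;     /* rounded maximum level in dB         */
    int    steps;   /* number of slider positions in range */
};

static struct DbRange
MakeDbRange(Fixed minFixed, Fixed maxFixed)
{
    struct DbRange r;

    /* Convert 16.16 Fixed to dB and snap to the DBSTEP grid */
    r.min   = floor(20 * log10(minFixed / 65536.0) / DBSTEP + 0.5) * DBSTEP;
    r.max   = floor(20 * log10(maxFixed / 65536.0) / DBSTEP + 0.5) * DBSTEP;
    r.steps = (int) ((r.max - r.min) / DBSTEP) + 1;
    return r;
}

static int
DbSliderIndex(Fixed currentFixed, struct DbRange r)
{
    double current = 20 * log10(currentFixed / 65536.0);

    /* Zero-based slider position relative to the rounded minimum */
    return (int) ((current - r.min) / DBSTEP + 0.5);
}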
void CLevel::SaveToFile()
{
    if (!m_sFile.length())
    {
        TAssert(m_sFile.length());
        TError("Can't find level file \'" + m_sFile + "\' to save.\n");
        return;
    }

    std::basic_ofstream<tchar> f(m_sFile.c_str());

    tstring sMessage = "// Generated by the Tinker Engine\n// Feel free to modify\n\n";
    f.write(sMessage.data(), sMessage.length());

    tstring sName = "Name: " + m_sName + "\n";
    f.write(sName.data(), sName.length());

    tstring sGameMode = "GameMode: " + m_sGameMode + "\n\n";
    f.write(sGameMode.data(), sGameMode.length());

    for (size_t i = 0; i < m_aLevelEntities.size(); i++)
    {
        auto pEntity = &m_aLevelEntities[i];

        tstring sEntity = "Entity: " + pEntity->GetClass() + "\n{\n";
        f.write(sEntity.data(), sEntity.length());

        if (pEntity->GetName().length())
        {
            tstring sName = "\tName: " + pEntity->GetName() + "\n";
            f.write(sName.data(), sName.length());
        }

        for (auto it = pEntity->GetParameters().begin(); it != pEntity->GetParameters().end(); it++)
        {
            if (it->first == "Name")
                continue;

            tstring sName = "\t" + it->first + ": " + it->second + "\n";
            f.write(sName.data(), sName.length());
        }

        for (size_t j = 0; j < pEntity->GetOutputs().size(); j++)
        {
            auto pOutput = &pEntity->GetOutputs()[j];

            tstring sOutput = "\n\tOutput: " + pOutput->m_sOutput + "\n\t{\n";
            f.write(sOutput.data(), sOutput.length());

            if (pOutput->m_sTargetName.length())
            {
                tstring sTarget = "\t\tTarget: " + pOutput->m_sTargetName + "\n";
                f.write(sTarget.data(), sTarget.length());
            }

            if (pOutput->m_sInput.length())
            {
                tstring sInput = "\t\tInput: " + pOutput->m_sInput + "\n";
                f.write(sInput.data(), sInput.length());
            }

            if (pOutput->m_sArgs.length())
            {
                tstring sArgs = "\t\tArgs: " + pOutput->m_sArgs + "\n";
                f.write(sArgs.data(), sArgs.length());
            }

            if (pOutput->m_bKill)
            {
                tstring sKill = "\t\tKill: yes\n";
                f.write(sKill.data(), sKill.length());
            }

            tstring sClose = "\t}\n";
            f.write(sClose.data(), sClose.length());
        }

        tstring sClose = "}\n\n";
        f.write(sClose.data(), sClose.length());
    }

    TMsg("Wrote level file '" + m_sFile + "'\n");
}
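// For reference, SaveToFile() above emits a text layout along these lines.
// The level name, entity class, parameter and output names below are made-up
// example values, not output produced by the engine itself:
//
//   // Generated by the Tinker Engine
//   // Feel free to modify
//
//   Name: ExampleLevel
//   GameMode: CExampleGame
//
//   Entity: prop_static
//   {
//       Name: crate01
//       Model: models/crate.toy
//
//       Output: OnBreak
//       {
//           Target: door01
//           Input: Open
//           Args: 2.0
//           Kill: yes
//       }
//   }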
void CRegionalNeuralModel::SetTrainBias( REAL* prTrainBias )
{
    ASSERT( m_vcLayerNodes.size() >= 3 );
    m_vcTrainBias.assign( prTrainBias, prTrainBias+GetOutputs()+1 );
}
void CNeuralModel::SetTrainBias( REAL* prTrainBias )
{
    ASSERT( m_vcLayerNodes.size() >= 3 );

    //the last element is the weight coefficient
    m_vcTrainBias.assign( prTrainBias, prTrainBias+GetOutputs()+1 );
}
void CRegionalMetaModel::Predict( const REAL* prInputs, REAL* prOutputs )
{
    vector< CTrustRegion* > hitRegions;

    //best region or multiple regions?
    FindTrustRegions( prInputs, hitRegions );
//  FindBestRegion( prInputs, hitRegions );

    int nOutputs = GetOutputs();
    int nHitRegions = hitRegions.size();

    //for each trusted regional model, predict the result into vcOutputs[i][0...nOutputs-1]
    vector< vector<REAL> > vcOutputs(nHitRegions);
    for( int i=0; i<nHitRegions; i++ ){
        vcOutputs[i].resize( nOutputs );
        CMetaModel* pModel = m_vcMetaModels[ hitRegions[i]->GetModelId() ];
        pModel->Predict( prInputs, &vcOutputs[i][0] );
    }

    int nInputs = GetInputs();
    REAL rSumWeights = 0;
    vector< REAL > vcSum( nOutputs, 0.0 );

    //modified on 02/012/05 using trust probability
    for( int i=0; i<nHitRegions; i++ ){
        ASSERT( nInputs==hitRegions[i]->m_ptCen.size() );

        vector<REAL> vcDistSqr(nInputs, 0.0);
        vector<REAL> vcRadSqr(nInputs, 0.0);
        vector<REAL> vcProbs(nInputs, 0.0);

        //squared componentwise distance to the region center
        transform( prInputs, prInputs+nInputs, hitRegions[i]->m_ptCen.begin(), vcDistSqr.begin(), diff_sqr<REAL>() );
//      cout<<"dist sqr:"; copy( vcDistSqr.begin(), vcDistSqr.end(), ostream_iterator<REAL>(cout, " ") ); cout<<endl;

        //squared componentwise radius of the region
        transform( hitRegions[i]->m_vcRadius.begin(), hitRegions[i]->m_vcRadius.end(), hitRegions[i]->m_vcRadius.begin(), vcRadSqr.begin(), multiplies<REAL>() );
//      cout<<"radius sqr:"; copy( vcRadSqr.begin(), vcRadSqr.end(), ostream_iterator<REAL>(cout, " ") ); cout<<endl;

        transform( vcDistSqr.begin(), vcDistSqr.end(), vcRadSqr.begin(), vcProbs.begin(), divides<REAL>() );
//      cout<<"probs :"; copy( vcProbs.begin(), vcProbs.end(), ostream_iterator<REAL>(cout, " ") ); cout<<endl;

        REAL rProb = accumulate( vcProbs.begin(), vcProbs.end(), 0.0 ) / nInputs;
        rProb = max( 1-rProb, 0.0 );

        //the first (global) model is always trusted.
        if( i==0 && rProb<=0 )
            rProb = max( rProb, 1e-3 );

        cdump<<"prob "<<i<<" "<<rProb<<"\t";

//      REAL rWeight = rProb / hitRegions[i]->GetSphereRadius();
        REAL rWeight = rProb;
        for( int j=0; j<nOutputs; j++ ){
            vcSum[j] += vcOutputs[i][j]*rWeight;
        }
        rSumWeights += rWeight;
    }

    if( rSumWeights > 0 ){
        transform( vcSum.begin(), vcSum.end(), vcSum.begin(), bind2nd(divides<REAL>(), rSumWeights) );
        copy( vcSum.begin(), vcSum.end(), prOutputs );
    }else{
        copy( vcOutputs[0].begin(), vcOutputs[0].end(), prOutputs );
    }

    //compute the average outputs according to inverse sphere radius
/*  vector< REAL > vcSum( nOutputs, 0.0 );
    REAL rSumInvRadius = 0;
    for( i=0; i<nHitRegions; i++ ){
        REAL rInvRadius = 1.0 / hitRegions[i]->GetSphereRadius();
        for( int j=0; j<nOutputs; j++ ){
            vcSum[j] += vcOutputs[i][j]*rInvRadius;
        }
        rSumInvRadius += rInvRadius;
    }
    transform( vcSum.begin(), vcSum.end(), vcSum.begin(), bind2nd(divides<REAL>(), rSumInvRadius) );
    copy( vcSum.begin(), vcSum.end(), prOutputs );
*/

    cdump<<"pred..."<<nHitRegions<<" nets"<<endl;
}
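// diff_sqr is used in the trust-probability computation above but is not
// defined in this section. A minimal sketch consistent with that use (the
// squared componentwise difference); the project's actual definition may
// differ.
template <class T>
struct diff_sqr : public std::binary_function<T, T, T>
{
    T operator()( const T& x, const T& y ) const
    {
        T d = x - y;
        return d * d;
    }
};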
string CSvmModel::GetOptions( int id )
{
    ASSERT( id>=0 && id<GetOutputs() );
    return m_vcOptions[id];
//  return m_strOptions;
}
//////////////////////////////////////////////////////////////////////////////
// This is the top-level function which generates the PIC code for the pages
// of the algorithm.
//////////////////////////////////////////////////////////////////////////////
void DrawPic(CSV_INPUT *csv_input, int nLn)
{
    char pages[MAXPAGES][32];           // array for the list of UNIQUE album page IDs
    int nUniq = 1;
    bool foundPage = false;
    static BOX box[MAXBOX];
    CSV_INPUT thisPage[MAXCOMPPPAGE];   // buffer for the parsed initial data portion for this specific album page

    memset((char*)pages, '\x0', sizeof(pages));
    memset(thisPage, '\x0', sizeof(thisPage));
    memset(box, '\x0', sizeof(box));

    ////////////////////////////////////////////////////////////////////////////////////////
    // Since we're trying to restore the box-and-arrows view of the initial data on a page
    // basis, the first thing we need is a list of the unique page IDs present in the album.
    // The following code builds that unique list of album pages.
    ////////////////////////////////////////////////////////////////////////////////////////

    // Fill the first element, skipping the header (start at 1)
    strcpy(pages[0], (csv_input + 1)->page_alg);

    for (int i = 1; i < nLn; i++)       // Start at 1 to skip the header
    {
        foundPage = false;
        for (int j = 0; j < nUniq; j++)
        {
            if ( !strcmp( (csv_input + i)->page_alg, pages[j]) )
            {
                foundPage = true;
                break;
            }
        }
        if (!foundPage)
        {
            strcpy(pages[nUniq], (csv_input + i)->page_alg);
            nUniq++;
        }
    }

    fprintf(stderr, "DrawPic: nUniq = %d\n", nUniq);
    for (int i = 0; i < nUniq; i++)
        fprintf(stderr, "%d: %s\n", i, pages[i]);

    ////////////////////////////////////////////////////////////////////////////////////
    // OK. We have a list of unique page IDs.
    // Now:
    // 1. Go page by page. Select rows from csv_input for each page ID.
    // 2. Build the connection table for all components (aka boxes) sitting on the page.
    ////////////////////////////////////////////////////////////////////////////////////
    int nCmp = 0;
    FILE *flp;
    char fname[1024] = "";

    for (int i = 0; i < nUniq; i++)     // go page by page over the unique page name list
    {
        // Open the PIC file in the directory defined in the configuration
        memset(fname, '\x0', sizeof(fname));
        sprintf(fname, "%s/%s.pic", conf.picdir, pages[i]);
        flp = fopen(fname, "w");
        memset(box, '\x0', sizeof(box));

        if (!flp)
        {
            fprintf(stderr, "DrawPic Error: Can not open file %s\n", fname);
            continue;
        }

        nCmp = 0;
        int n1 = 0;

        // We're looking for boxes on the page pages[i]; we'll go through the whole data set and pick up
        // all matching records, storing them into the thisPage buffer (restricted by MAXCOMPPPAGE = 128 boxes per page).
        // thisPage is a CSV_INPUT buffer, that is, we just store the parsed CSV data into it for further analysis.
        for (int j = 0; j < nLn; j++)   // Go through the whole PTC and pick the components for this specific page
        {
            if ( !strcmp( (csv_input + j)->page_alg, pages[i]) )
            {
                memcpy((char*)&thisPage[nCmp], (char*)(csv_input + j), sizeof(CSV_INPUT));

                // OK. Let's start filling the BOX structures with the data we can immediately
                // get from the CSV.
                // Fill the box structure with the CSV_INPUT data actually collected earlier by the parser.
                box[nCmp].n = thisPage[nCmp].nom_row;               // Row number (aka box number)
                strcpy( box[nCmp].type, thisPage[nCmp].run_compon); // Component type

                n1 = 0;
                while (strlen(thisPage[nCmp].inputs[n1]))           // Input names
                {
                    strcpy( box[nCmp].inputs[n1].name, thisPage[nCmp].inputs[n1]);
                    n1++;
                }
                box[nCmp].nInp = n1;                                // This is how we know how many inputs the box has

                n1 = 0;
                while (strlen(thisPage[nCmp].outputs[n1]))          // Output names (same way as for inputs)
                {
                    strcpy( box[nCmp].outputs[n1].name, thisPage[nCmp].outputs[n1]);
                    n1++;
                }
                box[nCmp].nOut = n1;                                // Number of outputs

                nCmp++;
            }
        }

        fprintf(stderr, "Page %s: %d components\n", pages[i], nCmp);

        ///////////////////////////////////////////////////////////////////////////////////////////////////
        // Done. We now have the complete box[] structure array plus the number of components on this
        // specific album page, known as nCmp.
        // Now we need to fill in the to/from data.
        // Logic:
        //   An input is either some box's output or a direct page input.
        //   We go box by box, checking each input against the outputs of the other boxes.
        //   If it's found, we put that box's number into the 'from' structure member;
        //   otherwise we put _PAGE_INPUT instead.
        ///////////////////////////////////////////////////////////////////////////////////////////////////

        // Collect inputs and outputs, set cross-references
        for (int i = 0; i < nCmp; i++)  // box-by-box loop
        {
            GetInputs(box, i, nCmp);
            GetOutputs(box, i, nCmp);
        }

        // Print out the thisPage and box arrays to stderr,
        // normally redirected to a log file
        LogDwgData(thisPage, box, nCmp);

        // Now try to generate the PIC code for the page
        fprintf(flp, ".PS %g\n", conf.scale);   // Scale

        // This is where the PIC code is actually built and written to the file
        GeneratePICsrc(flp, box, nCmp);

        fprintf(flp, ".PE\n");

        if (flp)
        {
            fclose(flp);
            flp = NULL;
        }
    }

    return;
}
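// BOX is used throughout DrawPic() but is not declared in this section. The
// declaration below is only a sketch consistent with the fields referenced
// above (n, type, inputs[].name, outputs[].name, nInp, nOut); the PIN name,
// the array bounds, and the 'from' member are assumptions, not the project's
// actual definition.
#define MAXPIN 32

typedef struct {
    char name[64];          // signal name as read from the CSV
    int  from;              // source box number, or _PAGE_INPUT
} PIN;

typedef struct {
    int  n;                 // row number (aka box number)
    char type[64];          // component type (run_compon)
    PIN  inputs[MAXPIN];    // input pins
    PIN  outputs[MAXPIN];   // output pins
    int  nInp;              // number of inputs
    int  nOut;              // number of outputs
} BOX;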