bool Skim_Group::Skim_Filename (char *filename)
{
	int i, num_field;
	bool binary_flag;
	Db_Field *fld;

	if (skim_file == NULL || skim_a == NULL || skim_b == NULL) return (false);

	//---- open the skim file ----

	char buffer [FIELD_BUFFER];

	str_fmt (buffer, sizeof (buffer), "Zone Skim File #%d", Group ());

	skim_file->File_Type (buffer);
	skim_a->File_Type (buffer);
	skim_b->File_Type (buffer);

	skim_file->File_ID (str_fmt (buffer, sizeof (buffer), "Skim%d", Group ()));
	skim_a->File_ID (str_fmt (buffer, sizeof (buffer), "Skim%dA", Group ()));
	skim_b->File_ID (str_fmt (buffer, sizeof (buffer), "Skim%dB", Group ()));

	if (!skim_file->Open (filename)) return (false);

	if (!skim_a->Add_Field ("KEY", Db_Code::INTEGER, sizeof (int), 0, 0, true)) return (false);
	if (!skim_b->Add_Field ("KEY", Db_Code::INTEGER, sizeof (int), 0, 0, true)) return (false);

	num_field = skim_file->Num_Fields ();
	binary_flag = (skim_file->Record_Format () == Db_Code::BINARY);

	for (i=1; i <= num_field; i++) {
		if (i == skim_file->Origin_Field ()) continue;
		if (i == skim_file->Destination_Field ()) continue;
		if (i == skim_file->Period_Field ()) continue;

		fld = skim_file->Field (i);
		if (fld == NULL) continue;

		if (!str_cmp (fld->Name (), "NOTES")) continue;

		if (!skim_a->Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Decimal (), END_OF_RECORD, binary_flag)) return (false);
		if (!skim_b->Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Decimal (), END_OF_RECORD, binary_flag)) return (false);
	}
	dump_field = skim_a->Optional_Field ("TIME", "DRIVE", "TRANSIT", "WALK", "COST");

	return (true);
}
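
//---- minimal usage sketch: how a caller might open the skim files for each
//---- skim group; the skim_group container, its First()/Next() iteration, and
//---- the filename variable are assumptions for illustration, not part of this source ----
//
//	for (Skim_Group *group = skim_group.First (); group != NULL; group = skim_group.Next ()) {
//		if (!group->Skim_Filename (filename)) {
//			File_Error ("Zone Skim File", filename);
//		}
//	}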
void ArcAddZ::Copy_Shape (File_Group *arcview)
{
	int i, nfield, z_field;
	bool binary;
	XYZ_Point pt;
	Db_Field *fld;

	z_field = arcview->input.Optional_Field ("Z_COORD", "ELEVATION", "Z");

	//---- create the fields ----

	nfield = arcview->input.Num_Fields ();
	binary = (arcview->input.Record_Format () == Db_Code::BINARY);

	for (i=1; i <= nfield; i++) {
		fld = arcview->input.Field (i);

		arcview->output.Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Decimal (), -1, binary);
	}
	arcview->output.Write_Header ();

	//---- copy the fields ----

	if (arcview->group_num > 0) {
		Show_Message ("Copying ArcView Shape File #%d -- Record", arcview->group_num);
	} else {
		Show_Message ("Copying ArcView Shape File -- Record");
	}
	Set_Progress ();

	//---- read the input shape file ----

	while (arcview->input.Read_Record ()) {
		Show_Progress ();

		arcview->output.parts.Reset ();
		arcview->output.points.Reset ();

		arcview->output.Copy_Fields (&arcview->input);

		if (z_field > 0) {
			arcview->output.Put_Field (z_field, z_coord);
		}
		for (i=1; i <= arcview->input.Num_Parts (); i++) {
			if (!arcview->output.parts.Add (arcview->input.parts [i])) goto point_error;
		}
		for (i=1; i <= arcview->input.Num_Points (); i++) {
			pt = *(arcview->input.points [i]);
			pt.z = z_coord;

			if (!arcview->output.points.Add (&pt)) goto point_error;
		}

		//---- save the output record ----

		if (!arcview->output.Write_Record ()) {
			Error ("Writing %s", arcview->output.File_Type ());
		}
	}
	End_Progress ();

	arcview->input.Close ();
	arcview->output.Close ();
	return;

point_error:
	Error ("Insufficient Memory for Output Shape Points");
	return;
}
void LocationData::Set_Files (void)
{
	int i, nfld, key;
	bool binary;
	Db_Field *fld;
	Db_Base *file;
	Db_Sort_Array *data;
	Data_Itr data_itr;
	Polygon_Itr poly_itr;

	//---- create the program data structure ----

	data_rec.push_back (input_file);
	data_rec.push_back (output_file);

	//---- convert each data file to binary ----

	for (data_itr = data_group.begin (); data_itr != data_group.end (); data_itr++) {
		file = data_itr->file;
		data = data_itr->data_db;

		data_rec.push_back ((Db_Base *) data);

		data->File_ID (file->File_ID ());
		data->File_Type (file->File_Type ());

		nfld = file->Num_Fields ();
		key = data_itr->join_field;
		binary = (file->Record_Format () == BINARY);

		fld = file->Field (key);

		data->Add_Field (fld->Name (), DB_INTEGER, 10);
		data->Add_Field ("AL_COUNT", DB_INTEGER, 10);

		for (i=0; i < nfld; i++) {
			if (i == key) continue;

			fld = file->Field (i);

			if (fld != 0) {
				data->Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Units (), binary);
			}
		}
	}

	//---- read the arcview boundary file ----

	for (poly_itr = polygons.begin (); poly_itr != polygons.end (); poly_itr++) {
		file = poly_itr->file;
		data = poly_itr->data_db;

		data_rec.push_back ((Db_Base *) data);

		data->File_ID (file->File_ID ());
		data->File_Type (file->File_Type ());

		nfld = file->Num_Fields ();
		binary = (file->Record_Format () == BINARY);

		data->Add_Field ("INDEX", DB_INTEGER, 10);

		for (i=0; i < nfld; i++) {
			fld = file->Field (i);

			if (fld != 0) {
				data->Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Units (), binary);
			}
		}
	}
	Write (1, "Compiling Conversion Script");

	if (Report_Flag (PRINT_SCRIPT)) {
		Header_Number (PRINT_SCRIPT);

		if (!Break_Check (10)) {
			Print (1);
			Page_Header ();
		}
	}
	program.Initialize (data_rec, random.Seed () + 1);

	if (!program.Compile (program_file, Report_Flag (PRINT_SCRIPT))) {
		Error ("Compiling Conversion Script");
	}
	if (Report_Flag (PRINT_STACK)) {
		Header_Number (PRINT_STACK);

		program.Print_Commands (false);
	}
	Header_Number (0);
}
void LinkSum::Program_Control (void)
{
	int i, field, ngroup, num;
	bool binary;
	String key, token;
	Location_File *location_file;
	Db_Field *fld;
	Doubles dbl;
	Doubles_Itr itr;

	//---- set the equivalence flags ----

	Zone_Equiv_Flag (Check_Control_Key (NEW_GROUP_TRAVEL_FILE));

	group_select = Set_Control_Flag (SELECT_BY_LINK_GROUP);

	Link_Equiv_Flag (group_select || Report_Flag (LINK_GROUP) || Report_Flag (TRAVEL_TIME) ||
		Report_Flag (GROUP_REPORT) || Report_Flag (GROUP_SUMMARY) || Check_Control_Key (NEW_GROUP_SUMMARY_FILE));

	//---- open network files ----

	Data_Service::Program_Control ();

	Read_Select_Keys ();

	turn_flag = System_File_Flag (TURN_DELAY);

	Print (2, String ("%s Control Keys:") % Program ());

	//---- open the compare performance file ----

	key = Get_Control_String (COMPARE_PERFORMANCE_FILE);

	if (!key.empty ()) {
		compare_file.File_Type ("Compare Performance File");
		Print (1);

		if (Check_Control_Key (COMPARE_PERFORMANCE_FORMAT)) {
			compare_file.Dbase_Format (Get_Control_String (COMPARE_PERFORMANCE_FORMAT));
		}
		compare_file.Open (Project_Filename (key));
		compare_flag = true;
	}
	num_inc = sum_periods.Num_Periods ();
	if (num_inc < 1) num_inc = 1;

	cap_factor = (double) sum_periods.Range_Length () / (Dtime (1, HOURS) * num_inc);
	if (cap_factor <= 0.0) cap_factor = 1.0;

	//---- open the compare link map file ----

	key = Get_Control_String (COMPARE_LINK_MAP_FILE);

	if (!key.empty ()) {
		link_map_file.File_Type ("Compare Link Map File");
		Print (1);

		if (Check_Control_Key (COMPARE_LINK_MAP_FORMAT)) {
			link_map_file.Dbase_Format (Get_Control_String (COMPARE_LINK_MAP_FORMAT));
		}
		link_map_file.Open (Project_Filename (key));
		link_map_flag = true;
	}

	//---- get minimum volume ----

	minimum_volume = Get_Control_Double (MINIMUM_LINK_VOLUME);

	//---- person-based statistics ----

	person_flag = Get_Control_Flag (PERSON_BASED_STATISTICS);

	//---- get the select by link group flag ----

	group_select = Get_Control_Flag (SELECT_BY_LINK_GROUP);

	if (group_select || select_subareas || select_polygon || select_facilities) select_flag = true;

	//---- turning movement data ----

	if (turn_flag) {

		//---- open the compare turn delays file ----

		key = Get_Control_String (COMPARE_TURN_DELAY_FILE);

		if (!key.empty ()) {
			turn_compare_file.File_Type ("Compare Turn Delay File");
			Print (1);

			if (Check_Control_Key (COMPARE_TURN_DELAY_FORMAT)) {
				turn_compare_file.Dbase_Format (Get_Control_String (COMPARE_TURN_DELAY_FORMAT));
			}
			turn_compare_file.Open (Project_Filename (key));
			turn_compare_flag = true;
		}

		//---- select turn nodes ----

		key = exe->Get_Control_Text (TURN_NODE_RANGE);

		if (!key.empty () && !key.Equals ("ALL")) {
			if (!turn_range.Add_Ranges (key)) {
				exe->Error ("Adding Turn Node Ranges");
			}
		}
	}

	//---- create link activity file ----

	key = Get_Control_String (NEW_LINK_ACTIVITY_FILE);

	if (!key.empty ()) {
		if (!System_File_Flag (LOCATION)) {
			Error ("A Location File is needed for the Link Activity File");
		}
		location_file = System_Location_File ();
		binary = (location_file->Record_Format () == BINARY);

		Print (1);
		activity_file.File_Type ("New Link Activity File");

		if (Check_Control_Key (NEW_LINK_ACTIVITY_FORMAT)) {
			activity_file.Dbase_Format (Get_Control_String (NEW_LINK_ACTIVITY_FORMAT));
		}
		activity_file.Create (Project_Filename (key));

		//---- copy location field names ----

		key = Get_Control_Text (COPY_LOCATION_FIELDS);

		if (key.empty ()) {
			Error ("Location Field Names are Required for Link Activity File");
		}
		link_db.File_Type ("Link Location Database");

		//---- set required fields ----

		link_db.Add_Field ("LINK", DB_INTEGER, 10);
		activity_file.Add_Field ("LINK", DB_INTEGER, 10);

		//---- copy selected fields ----

		while (!key.Split (token)) {
			field = location_file->Field_Number (token);

			if (field < 0) {
				Error (String ("Field %s was Not Found in the Location File") % token);
			}
			field_list.push_back (field);

			fld = location_file->Field (field);

			link_db.Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Units (), binary);
			activity_file.Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Units (), binary);
		}
		activity_file.Write_Header ();
		activity_flag = true;
	}

	//---- create zone travel file ----

	key = Get_Control_String (NEW_ZONE_TRAVEL_FILE);

	if (!key.empty ()) {
		if (!System_File_Flag (LOCATION)) {
			Error ("A Location File is needed for the Zone Travel File");
		}
		Print (1);
		zone_file.File_Type ("New Zone Travel File");

		if (Check_Control_Key (NEW_ZONE_TRAVEL_FORMAT)) {
			zone_file.Dbase_Format (Get_Control_String (NEW_ZONE_TRAVEL_FORMAT));
		}
		zone_file.Create (Project_Filename (key));
		zone_flag = true;
	}

	//---- create zone group travel file ----

	key = Get_Control_String (NEW_GROUP_TRAVEL_FILE);

	if (!key.empty ()) {
		if (!System_File_Flag (LOCATION)) {
			Error ("A Location File is needed for the Group Travel File");
		}
		Print (1);
		group_file.File_Type ("New Group Travel File");

		if (Check_Control_Key (NEW_GROUP_TRAVEL_FORMAT)) {
			group_file.Dbase_Format (Get_Control_String (NEW_GROUP_TRAVEL_FORMAT));
		}
		group_file.Create (Project_Filename (key));
		group_flag = true;
	}
	if (!activity_flag && !zone_flag && !group_flag) {
		System_File_False (LOCATION);
	}

	//---- get the number of direction groups ----

	ngroup = Highest_Control_Group (NEW_LINK_DIRECTION_FILE, 0);

	if (ngroup > 0) {
		Dir_Group group, *group_ptr;

		//---- process each group ----

		for (i=1; i <= ngroup; i++) {
			key = Get_Control_String (NEW_LINK_DIRECTION_FILE, i);

			if (key.empty ()) continue;
			Print (1);

			dir_group.push_back (group);
			group_ptr = &dir_group.back ();

			group_ptr->group = i;
			group_ptr->file = new Link_Direction_File ();
			group_ptr->file->File_Type (String ("New Link Direction File #%d") % i);

			if (Check_Control_Key (NEW_LINK_DIRECTION_FORMAT, i)) {
				group_ptr->file->Dbase_Format (Get_Control_String (NEW_LINK_DIRECTION_FORMAT, i));
			}
			group_ptr->file->Lane_Use_Flows (Lane_Use_Flows ());
			group_ptr->file->Create (Project_Filename (key));

			//---- get the field name ----

			key = Get_Control_Text (NEW_LINK_DIRECTION_FIELD, i);

			if (key.empty ()) {
				Error (String ("New Link Direction Field #%d is Missing") % i);
			}
			group_ptr->field = Performance_Code (key);

			group_ptr->index = Get_Control_Flag (NEW_LINK_DIRECTION_INDEX, i);
			group_ptr->flip = Get_Control_Flag (NEW_LINK_DIRECTION_FLIP, i);

			if (group_ptr->flip && !compare_flag) {
				Warning ("Link Direction Flipping requires Comparison Data");
				group_ptr->flip = false;
			}
			Set_Link_Dir (group_ptr);
		}
	}

	//---- get the number of groups ----

	ngroup = Highest_Control_Group (NEW_LINK_DATA_FILE, 0);

	if (ngroup > 0) {
		Data_Group group, *group_ptr;

		//---- process each group ----

		for (i=1; i <= ngroup; i++) {
			key = Get_Control_String (NEW_LINK_DATA_FILE, i);

			if (key.empty ()) continue;
			Print (1);

			data_group.push_back (group);
			group_ptr = &data_group.back ();

			group_ptr->group = i;
			group_ptr->file = new Link_Data_File ();
			group_ptr->file->File_Type (String ("New Link Data File #%d") % i);

			if (Check_Control_Key (NEW_LINK_DATA_FORMAT, i)) {
				group_ptr->file->Dbase_Format (Get_Control_String (NEW_LINK_DATA_FORMAT, i));
			}
			group_ptr->file->Lane_Use_Flows (Lane_Use_Flows ());
			group_ptr->file->Create (Project_Filename (key));

			//---- get the field name ----

			key = Get_Control_Text (NEW_LINK_DATA_FIELD, i);

			if (key.empty ()) {
				Error (String ("New Link Data Field #%d is Missing") % i);
			}
			group_ptr->field = Performance_Code (key);

			Set_Link_Data (group_ptr);
		}
	}

	//---- data summary file ----

	key = Get_Control_String (NEW_DATA_SUMMARY_FILE);

	if (!key.empty ()) {
		Print (1);
		summary_file.File_Type ("New Data Summary File");

		if (Check_Control_Key (NEW_DATA_SUMMARY_FORMAT)) {
			summary_file.Dbase_Format (Get_Control_String (NEW_DATA_SUMMARY_FORMAT));
		}
		summary_file.Add_Field ("MEASURE", DB_STRING, 40);
		summary_file.Add_Field ("VALUE", DB_DOUBLE, 12.2);
		if (compare_flag) summary_file.Add_Field ("COMPARE", DB_DOUBLE, 12.2);

		summary_file.Create (Project_Filename (key));
		summary_flag = true;

		//---- data summary periods ----

		if (!Control_Key_Empty (NEW_DATA_SUMMARY_PERIODS)) {
			periods_flag = data_periods.Add_Ranges (Get_Control_Text (NEW_DATA_SUMMARY_PERIODS));
		}

		//---- data summary ratios ----

		if (Check_Control_Key (NEW_DATA_SUMMARY_RATIOS)) {
			Double_List list;
			Dbl_Itr itr;

			Get_Control_List (NEW_DATA_SUMMARY_RATIOS, list);

			for (i=0, itr = list.begin (); itr != list.end (); itr++, i++) {
				if (i > 0 && *itr >= 1.0) {
					data_ratios.push_back (Round (*itr * 100.0));
					ratios_flag = true;
				}
			}
		}
	}

	//---- group summary file ----

	key = Get_Control_String (NEW_GROUP_SUMMARY_FILE);

	if (!key.empty ()) {
		Print (1);
		group_sum_file.File_Type ("New Group Summary File");

		if (Check_Control_Key (NEW_GROUP_SUMMARY_FORMAT)) {
			group_sum_file.Dbase_Format (Get_Control_String (NEW_GROUP_SUMMARY_FORMAT));
		}
		group_sum_file.Add_Field ("MEASURE", DB_STRING, 40);
		group_sum_file.Add_Field ("VALUE", DB_DOUBLE, 12.2);
		if (compare_flag) group_sum_file.Add_Field ("COMPARE", DB_DOUBLE, 12.2);

		group_sum_file.Create (Project_Filename (key));
		group_sum_flag = true;
	}

	//---- read report types ----

	List_Reports ();

	if (!compare_flag && (Report_Flag (TIME_CHANGE) || Report_Flag (VOLUME_CHANGE) ||
		Report_Flag (TOP_TIME_CHANGE) || Report_Flag (TOP_VOL_CHANGE) || Report_Flag (RELATIVE_GAP))) {
		Error ("A Compare Performance File is Required for Change Reports");
	}

	//---- process support data ----

	if (Link_Equiv_Flag ()) {
		link_equiv.Read (Report_Flag (LINK_EQUIV));
	}
	if (group_flag) {
		zone_equiv.Read (Report_Flag (ZONE_EQUIV));
	}

	//---- allocate work space ----

	if (periods_flag || ratios_flag) {
		num = (periods_flag) ? (int) data_periods.size () : 1;
		num += (ratios_flag) ? (int) data_ratios.size () : 1;

		if (num_inc + 1 > num) {
			num = num_inc + 1;
		}
	} else {
		num = num_inc + 1;
	}
	sum_bin.assign (num, dbl);

	for (itr = sum_bin.begin (); itr != sum_bin.end (); itr++) {
		itr->assign (NUM_SUM_BINS, 0.0);
	}
}
bool Db_Base::Output_Record (void)
{
	if (File_Format () == SQLITE3) {
		int i, j, num, lvalue;
		double dvalue;
		string svalue;
		Db_Field *fld;

		num = Num_Fields ();

		//---- bind each field to the prepared insert statement; nested records
		//---- bind the parent key at parameter 1, so data fields start at parameter 2 ----

		for (i=0, j=1; i < num; i++) {
			fld = Field (i);

			if (Nested () != fld->Nested ()) continue;

			if (Nested ()) {
				if (fld->Type () == DB_INTEGER) {
					Get_Field (i, lvalue);
					sqlite3_bind_int (insert_nest, j+1, lvalue);
				} else if (fld->Type () == DB_DOUBLE) {
					Get_Field (i, dvalue);
					sqlite3_bind_double (insert_nest, j+1, dvalue);
				} else {
					Get_Field (i, svalue);
					sqlite3_bind_text (insert_nest, j+1, svalue.c_str (), -1, SQLITE_TRANSIENT);
				}
			} else {
				if (fld->Type () == DB_INTEGER) {
					Get_Field (i, lvalue);
					sqlite3_bind_int (insert_stmt, j, lvalue);
				} else if (fld->Type () == DB_DOUBLE) {
					Get_Field (i, dvalue);
					sqlite3_bind_double (insert_stmt, j, dvalue);
				} else {
					Get_Field (i, svalue);
					sqlite3_bind_text (insert_stmt, j, svalue.c_str (), -1, SQLITE_TRANSIENT);
				}
			}
			j++;
		}

		//---- execute the insert and reset the statement for reuse ----

		if (Nested ()) {
			sqlite3_bind_int (insert_nest, 1, parent_id);

			if (sqlite3_step (insert_nest) != SQLITE_DONE) {
				exe->Warning ("Inserting Database: ") << sqlite3_errmsg (db_file);
			}
			if (sqlite3_reset (insert_nest) != SQLITE_OK) {
				exe->Warning ("Inserting Database: ") << sqlite3_errmsg (db_file);
			}
		} else {
			if (sqlite3_step (insert_stmt) != SQLITE_DONE) {
				exe->Warning ("Inserting Database: ") << sqlite3_errmsg (db_file);
			}

			//---- remember the row id so nested records can link back to this parent ----

			parent_id = (int) sqlite3_last_insert_rowid (db_file);

			if (sqlite3_reset (insert_stmt) != SQLITE_OK) {
				exe->Warning ("Inserting Database: ") << sqlite3_errmsg (db_file);
			}
		}
	} else if (File_Format () == CSV_DELIMITED) {
		int i, j, max;
		string *ptr;
		char *delimiter, *record;
		bool first = true;
		Db_Field *fld;

		delimiter = Delimiters ();
		record = Record_String ();

		//---- write each field buffer to the delimited record ----

		for (i=0; i < Num_Fields (); i++) {
			fld = Field (i);

			if (Nested () == fld->Nested ()) {
				ptr = &fld->Buffer ();

				max = (int) ptr->size ();
				if (max > fld->Width ()) max = fld->Width ();

				if (first) {
					first = false;
				} else {
					*record++ = *delimiter;
				}
				for (j=0; j < max; j++) {
					*record++ = (*ptr) [j];
				}
				*record = '\0';
			}
		}
		Set_Size ();
	}
	return (true);
}
bool Db_Base::Input_Record (void)
{
	if (File_Format () == SQLITE3) {
		int i, j, num, lvalue, result;
		const unsigned char *text;
		double dvalue;
		Db_Field *fld;

		//---- get the next record ----

		if (Nested ()) {
			if (new_nest_flag) {
				new_nest_flag = false;

				sqlite3_reset (read_nest);
				sqlite3_bind_int (read_nest, 1, parent_id);
			}
			result = sqlite3_step (read_nest);
		} else {
			Record_Number (0);
			parent_id = Record_Number ();
			new_nest_flag = true;

			sqlite3_reset (read_stmt);
			sqlite3_bind_int (read_stmt, 1, parent_id);

			result = sqlite3_step (read_stmt);
		}
		if (result == SQLITE_DONE) return (false);

		if (result != SQLITE_ROW) {
			exe->Warning ("SQLITE3 Read Problem: ") << sqlite3_errmsg (db_file);
		}

		//---- copy the result columns into the field buffers; nested statements
		//---- skip the first result column, so data fields start at column 1 ----

		num = Num_Fields ();

		for (i=0, j=0; i < num; i++) {
			fld = Field (i);

			if (Nested () != fld->Nested ()) continue;

			if (Nested ()) {
				if (fld->Type () == DB_INTEGER) {
					lvalue = sqlite3_column_int (read_nest, j+1);
					Put_Field (i, lvalue);
				} else if (fld->Type () == DB_DOUBLE) {
					dvalue = sqlite3_column_double (read_nest, j+1);
					Put_Field (i, dvalue);
				} else {
					text = sqlite3_column_text (read_nest, j+1);
					Put_Field (i, (char *) text);
				}
			} else {
				if (fld->Type () == DB_INTEGER) {
					lvalue = sqlite3_column_int (read_stmt, j);
					Put_Field (i, lvalue);
				} else if (fld->Type () == DB_DOUBLE) {
					dvalue = sqlite3_column_double (read_stmt, j);
					Put_Field (i, dvalue);
				} else {
					text = sqlite3_column_text (read_stmt, j);
					Put_Field (i, (char *) text);
				}
			}
			j++;
		}
	} else if (File_Format () == CSV_DELIMITED) {
		Strings fields;
		Db_Field *fld;

		//---- split the delimited record and copy each token into its field buffer ----

		int count = String (Record_String ()).Parse (fields, Delimiters ());

		for (int i=0; i < Num_Fields (); i++) {
			fld = Field (i);

			if (Nested () == fld->Nested ()) {
				if (fld->Offset () <= count) {
					fld->Buffer (fields [fld->Offset () - 1]);
				} else {
					fld->Buffer ("");
				}
			}
		}
	}
	return (true);
}
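
//---- note on the SQLITE3 statement layout shared by Output_Record and Input_Record above:
//---- the insert_stmt/insert_nest and read_stmt/read_nest statements are assumed to be
//---- prepared elsewhere (not shown here), with columns in Field () order, e.g.
//----   flat table:   fields bound at parameters 1..n; the rowid is saved as parent_id
//----   nested table: parameter/column 1 holds the parent key; data fields follow at 2..n+1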
bool LinkSum::Get_Location_Data (Location_File &file, Location_Data &data) { int i, field, link, lvalue1, lvalue2, zone; double dvalue1, dvalue2; String buffer; Db_Field *fld; Link_Location loc_rec; Int_Itr int_itr; Int_Map_Itr map_itr; //---- get the link number ---- link = file.Link (); if (link == 0) return (false); data.Link (link); //---- sum the activities on links ---- if (activity_flag) { if (!link_db.Read_Record (link)) { Error (String ("Reading Link Database Record %d") % link); } //---- sum the data fields ---- for (i=1, int_itr = field_list.begin (); int_itr != field_list.end (); int_itr++, i++) { field = *int_itr; fld = file.Field (field); if (fld->Type () == DB_INTEGER) { lvalue1 = file.Get_Integer (field); lvalue2 = link_db.Get_Integer (i); lvalue2 += lvalue1; link_db.Put_Field (i, lvalue2); } else if (fld->Type () == DB_DOUBLE) { dvalue1 = file.Get_Double (field); dvalue2 = link_db.Get_Double (i); dvalue2 += dvalue1; link_db.Put_Field (i, dvalue2); } else if (fld->Type () == DB_STRING) { link_db.Put_Field (i, file.Get_String (field)); } } //---- save the changes ---- if (!link_db.Write_Record (link)) { Error (String ("Writing Link Database Record %d") % link); } } //---- add a link location record ---- if (zone_flag || group_flag) { zone = file.Zone (); if (zone > 0) { map_itr = link_map.find (link); if (map_itr == link_map.end ()) return (false); link = map_itr->second; loc_rec.link = link; loc_rec.location = i = file.Location (); loc_rec.zone = zone; loc_rec.count = 0; link_location.insert (Link_Loc_Map_Data (Int2_Key (link, i), loc_rec)); } } //---- don't save the location data ---- return (false); }
void SmoothData::Program_Control (void)
{
	int i, lvalue, nfile, nheader, nfield, increment, max_field;
	int naverage, default_average, niter, default_iter, max_size, max_iter;
	char *str_ptr, *format_ptr, buffer [FIELD_BUFFER];
	char input_data [FIELD_BUFFER], output_data [FIELD_BUFFER];
	char input_format [FIELD_BUFFER], output_format [FIELD_BUFFER];
	char initial_file [STRING_BUFFER], initial_format [FIELD_BUFFER];
	double dvalue, forward, backward, factor;
	bool loop_flag, initial_flag, replicate_flag;
	File_Group *group_ptr;

	nfile = 0;
	nheader = 1;
	nfield = 0;
	max_field = 100;
	increment = 0;
	default_average = 3;
	max_size = 9;
	forward = 20.0;
	backward = 20.0;
	default_iter = 0;
	max_iter = 25;
	loop_flag = true;
	initial_flag = false;
	replicate_flag = false;
	input_data [0] = output_data [0] = '\0';

	str_cpy (input_format, sizeof (input_format), "HOURS");
	str_cpy (output_format, sizeof (output_format), "HOURS");

	//---- number of input files ----

	nfile = Highest_Control_Group (INPUT_DATA_FILE_x, 0);

	if (nfile == 0) {
		Error ("No Input Data File Keys");
	}
	file_group.Max_Records (nfile);

	//---- get the default file format ----

	str_ptr = Get_Control_String (INPUT_DATA_FORMAT);

	if (str_ptr != NULL) {
		str_cpy (input_data, sizeof (input_data), str_ptr);
	}
	str_ptr = Get_Control_String (OUTPUT_DATA_FORMAT);

	if (str_ptr != NULL) {
		str_cpy (output_data, sizeof (output_data), str_ptr);
	}

	//---- get the default distribution filename ----

	str_ptr = Get_Control_String (DISTRIBUTION_FILE);

	if (str_ptr != NULL) {
		str_cpy (initial_file, sizeof (initial_file), str_ptr);
		initial_flag = true;

		str_ptr = Get_Control_String (DISTRIBUTION_FORMAT);

		if (str_ptr != NULL) {
			str_cpy (initial_format, sizeof (initial_format), str_ptr);
		} else {
			initial_format [0] = '\0';
		}
	}

	//---- get the time of day format ----

	str_ptr = Get_Control_String (INPUT_TIME_FORMAT);

	if (str_ptr != NULL) {
		str_cpy (input_format, sizeof (input_format), str_ptr);
	}
	str_ptr = Get_Control_String (OUTPUT_TIME_FORMAT);

	if (str_ptr != NULL) {
		str_cpy (output_format, sizeof (output_format), str_ptr);
	}

	//---- read the field number ----

	str_ptr = Get_Control_String (SMOOTH_FIELD_NUMBER);

	if (str_ptr != NULL) {
		Get_Integer (str_ptr, &lvalue);

		if (lvalue < 1 || lvalue > max_field) goto field_error;
		nfield = lvalue;
	}

	//---- time increment ----

	str_ptr = Get_Control_String (SMOOTH_TIME_INCREMENT);

	if (str_ptr != NULL) {
		Get_Integer (str_ptr, &lvalue);

		if (lvalue < 0 || lvalue > 3600) goto increment_error;
		increment = lvalue;
	}
	if (increment > 0) {
		max_size = 2 * 3600 / increment + 1;
		max_iter = (max_size + 1) / 2;
		if (max_iter > 100) max_iter = 100;
		max_iter *= max_iter;
	} else {
		max_size = 9;
		max_iter = 25;
	}

	//---- read the number of smooth records ----

	str_ptr = Get_Control_String (SMOOTH_GROUP_SIZE);

	if (str_ptr != NULL) {
		Get_Integer (str_ptr, &lvalue);

		if (lvalue < 3 || lvalue > max_size) goto size_error;
		if ((lvalue % 2) != 1) goto odd_error;
		default_average = lvalue;
	}

	//---- read the percent distributed forward ----

	factor = 100.0 - 50.0 / default_average;

	str_ptr = Get_Control_String (PERCENT_MOVED_FORWARD);

	if (str_ptr != NULL) {
		Get_Double (str_ptr, &dvalue);

		if (dvalue < 0.0 || dvalue > factor) goto forward_error;
		forward = dvalue;
	}

	//---- read the percent distributed backward ----

	str_ptr = Get_Control_String (PERCENT_MOVED_BACKWARD);

	if (str_ptr != NULL) {
		Get_Double (str_ptr, &dvalue);

		if (dvalue < 0.0 || dvalue > factor) goto backward_error;
		backward = dvalue;
	}
	factor = forward + backward;
	dvalue = 100.0 - 100.0 / default_average;

	if (factor < 5.0 || factor > dvalue) goto combined_error;

	//---- number of iterations ----

	str_ptr = Get_Control_String (NUMBER_OF_ITERATIONS);

	if (str_ptr != NULL) {
		Get_Integer (str_ptr, &lvalue);

		if (lvalue < 1 || lvalue > max_iter) goto iteration_error;
		default_iter = lvalue;
	}

	//---- read the circular smoothing flag ----

	str_ptr = Get_Control_String (CIRCULAR_GROUP_FLAG);

	if (str_ptr != NULL) {
		loop_flag = Get_Control_Flag (CIRCULAR_GROUP_FLAG);
	}

	//---- read the replicate fields flag ----

	str_ptr = Get_Control_String (REPLICATE_FIELDS_FLAG);

	if (str_ptr != NULL) {
		replicate_flag = Get_Control_Flag (REPLICATE_FIELDS_FLAG);
	}

	//---- open each file ----

	for (i=1; i <= nfile; i++) {

		//---- get the next filename ----

		str_ptr = Get_Control_String (INPUT_DATA_FILE_x, i);

		if (str_ptr == NULL) continue;

		group_ptr = file_group.New_Record (true);

		group_ptr->Group (i);

		//---- create the input file ----

		Print (1);
		group_ptr->Input_File (new Diurnal_File ());

		str_fmt (buffer, sizeof (buffer), "Input Data File #%d", i);
		group_ptr->Input_File ()->File_Type (buffer);

		format_ptr = Get_Control_String (INPUT_DATA_FORMAT_x, i);

		if (format_ptr != NULL) {
			group_ptr->Input_File ()->Dbase_Format (format_ptr);
		} else if (input_data [0] != '\0') {
			group_ptr->Input_File ()->Dbase_Format (input_data);
		}

		//---- open the file ----

		if (!group_ptr->Input_File ()->Open (Project_Filename (str_ptr))) {
			File_Error (buffer, group_ptr->Input_File ()->Filename ());
		}

		//---- open the output file ----

		str_ptr = Get_Control_String (OUTPUT_DATA_FILE_x, i);

		if (str_ptr == NULL) goto control_error;

		group_ptr->Output_File (new Diurnal_File (Db_Code::CREATE));

		str_fmt (buffer, sizeof (buffer), "Output Data File #%d", i);
		group_ptr->Output_File ()->File_Type (buffer);

		format_ptr = Get_Control_String (OUTPUT_DATA_FORMAT_x, i);

		if (format_ptr != NULL) {
			group_ptr->Output_File ()->Dbase_Format (format_ptr);
		} else if (output_data [0] != '\0') {
			group_ptr->Output_File ()->Dbase_Format (output_data);
		}
		if (!group_ptr->Output_File ()->Open (Project_Filename (str_ptr))) {
			File_Error (buffer, group_ptr->Output_File ()->Filename ());
		}

		//---- get the distribution file ----

		str_ptr = Get_Control_String (DISTRIBUTION_FILE_x, i);

		if (str_ptr == NULL) {
			if (initial_flag) {
				str_ptr = initial_file;

				if (initial_format [0] != '\0') {
					format_ptr = initial_format;
				} else {
					format_ptr = NULL;
				}
			}
		} else {
			format_ptr = Get_Control_String (DISTRIBUTION_FORMAT_x, i);
		}
		if (str_ptr != NULL) {
			group_ptr->Initial_File (new Diurnal_File ());

			str_fmt (buffer, sizeof (buffer), "Distribution File #%d", i);
			group_ptr->Initial_File ()->File_Type (buffer);

			if (format_ptr != NULL) {
				group_ptr->Initial_File ()->Dbase_Format (format_ptr);
			}
			if (!group_ptr->Initial_File ()->Open (Project_Filename (str_ptr))) {
				File_Error (buffer, group_ptr->Initial_File ()->Filename ());
			}
		} else {
			group_ptr->Initial_File (NULL);
		}

		//---- get the time of day format ----

		str_ptr = Get_Control_String (INPUT_TIME_FORMAT_x, i);

		if (str_ptr == NULL) {
			str_ptr = input_format;
		}
		if (!group_ptr->Input_Format (str_ptr)) {
			Error ("Input Time Format %s was Unrecognized", str_ptr);
		}
		Print (1, "Input Time Format = %s", str_ptr);

		str_ptr = Get_Control_String (OUTPUT_TIME_FORMAT_x, i);

		if (str_ptr == NULL) {
			str_ptr = output_format;
		}
		if (!group_ptr->Output_Format (str_ptr)) {
			Error ("Output Time Format %s was Unrecognized", str_ptr);
		}
		Print (1, "Output Time Format = %s", str_ptr);

		//---- read the field number ----

		max_field = group_ptr->Input_File ()->Num_Fields ();

		str_ptr = Get_Control_String (SMOOTH_FIELD_NUMBER_x, i);

		if (str_ptr != NULL) {
			Get_Integer (str_ptr, &lvalue);

			if (lvalue < 1 || lvalue > max_field) goto field_error;
		} else {
			lvalue = nfield;
		}
		group_ptr->Field_Number (lvalue);

		if (lvalue > 0) {
			Print (1, "Smooth Field Number = %d", lvalue);
			group_ptr->Input_File ()->Share_Field (lvalue);
		}

		//---- time increment ----

		str_ptr = Get_Control_String (SMOOTH_TIME_INCREMENT_x, i);

		if (str_ptr != NULL) {
			Get_Integer (str_ptr, &lvalue);

			if (lvalue < 0 || lvalue > 3600) goto increment_error;
		} else {
			lvalue = increment;
		}
		if (lvalue > 0) {
			Print (1, "Smooth Time Increment = %d seconds", lvalue);

			max_size = 2 * 3600 / lvalue + 1;
			max_iter = (max_size + 1) / 2;
			if (max_iter > 100) max_iter = 100;
			max_iter *= max_iter;

			naverage = default_average;
			niter = default_iter;

			if (niter == 0) {
				niter = max_iter / 2;
			}
		} else {
			Print (1, "Smooth Based on Input Time Periods");

			max_size = 9;
			max_iter = 25;

			naverage = default_average;
			niter = default_iter;
		}
		group_ptr->Increment (lvalue);

		//---- read the number of smooth records ----

		str_ptr = Get_Control_String (SMOOTH_GROUP_SIZE_x, i);

		if (str_ptr != NULL) {
			Get_Integer (str_ptr, &lvalue);

			if (lvalue < 3 || lvalue > max_size) goto size_error;
			if ((lvalue % 2) != 1) goto odd_error;
		} else {
			lvalue = naverage;
		}
		Print (1, "Smooth Group Size = %d", lvalue);
		group_ptr->Num_Average (lvalue);

		//---- read the percent distributed forward ----

		factor = 100.0 - 50.0 / lvalue;

		str_ptr = Get_Control_String (PERCENT_MOVED_FORWARD_x, i);

		if (str_ptr != NULL) {
			Get_Double (str_ptr, &dvalue);

			if (dvalue < 0.0 || dvalue > factor) goto forward_error;
		} else {
			dvalue = forward;
		}
		Print (1, "Percent Distributed Forward = %.1lf percent", dvalue);
		group_ptr->Forward (dvalue);

		//---- read the percent distributed backward ----

		str_ptr = Get_Control_String (PERCENT_MOVED_BACKWARD_x, i);

		if (str_ptr != NULL) {
			Get_Double (str_ptr, &dvalue);

			if (dvalue < 0.0 || dvalue > factor) goto backward_error;
		} else {
			dvalue = backward;
		}
		Print (1, "Percent Distributed Backward = %.1lf percent", dvalue);
		group_ptr->Backward (dvalue);

		factor = group_ptr->Forward () + group_ptr->Backward ();
		dvalue = 100.0 - 100.0 / group_ptr->Num_Average ();

		if (factor < 5.0 || factor > dvalue) goto combined_error;

		//---- number of iterations ----

		str_ptr = Get_Control_String (NUMBER_OF_ITERATIONS_x, i);

		if (str_ptr != NULL) {
			Get_Integer (str_ptr, &lvalue);

			if (lvalue < 1 || lvalue > max_iter) goto iteration_error;
		} else {
			lvalue = niter;
		}
		Print (1, "Number of Iterations = %d", lvalue);
		group_ptr->Num_Iteration (lvalue);

		//---- read the circular smoothing flag ----

		str_ptr = Get_Control_String (CIRCULAR_GROUP_FLAG_x, i);

		if (str_ptr != NULL) {
			group_ptr->Loop_Flag (Get_Control_Flag (CIRCULAR_GROUP_FLAG_x, i));
		} else {
			group_ptr->Loop_Flag (loop_flag);
		}
		Print (1, "Circular Group Flag = %s", (group_ptr->Loop_Flag () ? "TRUE" : "FALSE"));

		//---- read the replicate fields flag ----

		if (Get_Control_String (REPLICATE_FIELDS_FLAG_x, i) != NULL) {
			group_ptr->Replicate_Flag (Get_Control_Flag (REPLICATE_FIELDS_FLAG_x, i));
		} else {
			group_ptr->Replicate_Flag (replicate_flag);
		}
		Print (1, "Replicate Fields Flag = %s", (group_ptr->Replicate_Flag () ? "TRUE" : "FALSE"));

		if (group_ptr->Replicate_Flag ()) {
			int field, num;
			bool binary;
			Db_Field *fld;
			Diurnal_File *file;

			group_ptr->Output_File ()->Share_Field (0);
			group_ptr->Output_File ()->Clear_Fields ();

			//---- copy all fields ----

			file = group_ptr->Input_File ();
			binary = (file->Record_Format () == Db_Code::BINARY);

			num = file->Num_Fields ();

			for (field=1; field <= num; field++) {
				fld = file->Field (field);
				if (fld == NULL) continue;

				if (field == file->Start_Field () || field == file->End_Field ()) {
					group_ptr->Output_File ()->Add_Field (fld->Name (), Db_Code::STRING, 20);
				} else {
					group_ptr->Output_File ()->Add_Field (fld->Name (), fld->Type (), fld->Size (), fld->Decimal (), -1, binary);
				}
			}
			group_ptr->Output_File ()->Write_Header ();

			group_ptr->Output_File ()->Share_Field (group_ptr->Input_File ()->Share_Field ());
		}

		//---- add the group record ----

		if (!file_group.Add ()) {
			Error ("Adding File Group");
		}
	}
	return;

	//---- error messages ----

control_error:
	Error ("Missing Control Key = %s", Current_Key ());

field_error:
	Error ("Field Number %d is Out of Range (1..%d)", lvalue, max_field);

increment_error:
	Error ("Smooth Time Increment %d is Out of Range (0..3600)", lvalue);

size_error:
	Error ("Smooth Group Size %d is Out of Range (3..%d)", lvalue, max_size);

odd_error:
	Error ("Smooth Group Size %d must be an Odd Number", lvalue);

forward_error:
	Error ("Forward Percentage %.1lf is Out of Range (0..%.1lf)", dvalue, factor);

backward_error:
	Error ("Backward Percentage %.1lf is Out of Range (0..%.1lf)", dvalue, factor);

combined_error:
	Error ("Combined Distribution Percentage %.1lf is Out of Range (5..%.0lf)", factor, dvalue);

iteration_error:
	Error ("Number of Iterations %d is Out of Range (1..%d)", lvalue, max_iter);
}