// Panel that embeds a line chart plotting two sample data series over the
// first seven months of the year. All chart data is hard-coded demo content.
wxLineChartPanel::wxLineChartPanel(wxWindow* parent) : wxPanel(parent)
{
    // Month names used as the chart's category labels.
    const char* const monthNames[] = {
        "January", "February", "March", "April", "May", "June", "July"
    };
    const size_t monthCount = sizeof(monthNames) / sizeof(monthNames[0]);

    wxVector<wxString> labels;
    for (size_t m = 0; m < monthCount; ++m)
        labels.push_back(monthNames[m]);
    wxLineChartData chartData(labels);

    // First sample series (grey line, translucent grey fill).
    const wxDouble seriesOne[] = { 3, -2.5, -1.2, 3, 6, 5, 1 };
    wxVector<wxDouble> points1;
    for (size_t p = 0; p < monthCount; ++p)
        points1.push_back(seriesOne[p]);
    wxLineChartDataset::ptr dataset1(new wxLineChartDataset(
        "My First Dataset",
        wxColor(220, 220, 220),
        wxColor(255, 255, 255),
        wxColor(220, 220, 220, 0x33),
        points1));
    chartData.AddDataset(dataset1);

    // Second sample series (blue line, translucent blue fill).
    const wxDouble seriesTwo[] = { 1, -1.33, 2.5, 7, 3, -1.8, 0.4 };
    wxVector<wxDouble> points2;
    for (size_t p = 0; p < monthCount; ++p)
        points2.push_back(seriesTwo[p]);
    wxLineChartDataset::ptr dataset2(new wxLineChartDataset(
        "My Second Dataset",
        wxColor(151, 187, 205),
        wxColor(255, 255, 255),
        wxColor(151, 187, 205, 0x33),
        points2));
    chartData.AddDataset(dataset2);

    // Create the chart control from the assembled data.
    m_lineChart = new wxLineChartCtrl(this, wxID_ANY, chartData,
                                      wxDefaultPosition, wxDefaultSize,
                                      wxBORDER_NONE);

    // Let the chart fill the whole panel.
    wxBoxSizer* sizer = new wxBoxSizer(wxHORIZONTAL);
    sizer->Add(m_lineChart, 1, wxEXPAND);
    SetSizer(sizer);
}
// Repeatedly reduces the vector by replacing it with the element-wise sums of
// adjacent pairs modulo 10, until a single value remains.
//
// Example: {1, 2, 3} -> {3, 5} -> {8}, so solve({1, 2, 3}) == 8.
//
// @param dataset  input values; may be empty.
// @return the single remaining value, or 0 for an empty input.
//
// Fixes over the previous version:
//  - empty input used to compute dataset.size() - 1, which underflows an
//    unsigned value and attempts a huge allocation (undefined behavior);
//  - the loop index was a signed int compared against an unsigned size();
//  - dead commented-out debug output removed.
int solve(const std::vector<int>& dataset)
{
    if (dataset.empty()) {
        return 0; // degenerate case: nothing to reduce
    }
    if (dataset.size() == 1) {
        return dataset[0]; // base case of the recursion
    }
    std::vector<int> reduced(dataset.size() - 1);
    for (std::size_t i = 0; i < reduced.size(); ++i) {
        reduced[i] = (dataset[i] + dataset[i + 1]) % 10;
    }
    return solve(reduced);
}
// Collects Old/New Testament parallel passages for "project" (and, when
// "project2" is non-empty, a second project) into the member containers
// "data", "references" and "comments".
//
// nt          - true: read NT parallels, false: read OT parallels.
// project     - project whose verses are looked up.
// books       - books to restrict the check to; empty means all books.
// includetext - when true, append the verse text after each human-readable
//               reference.
// gui         - when true, show a cancellable progress window.
// project2    - optional second project whose parallel verses are emitted as
//               an extra set per section.
CheckParallelPassages::CheckParallelPassages(bool nt, const ustring & project, const vector < unsigned int >&books, bool includetext, bool gui,const ustring & project2)
{
  // Language of the first project, used to render human-readable references.
  extern Settings *settings;
  ustring language = settings->projectconfig(project, false)->language_get();
  // Versification mapping for the first project.
  ustring versification = settings->projectconfig(project, false)->versification_get();
  Mapping mapping(versification, 0);
  // Get a list of the books to check. If no books were given, take them all.
  vector < unsigned int >mybooks(books.begin(), books.end());
  if (mybooks.empty())
    mybooks = project_get_books(project);
  // Set for O(log n) membership tests while filtering references below.
  set < unsigned int >bookset(mybooks.begin(), mybooks.end());
  // Get the parallel passages.
  OtNtParallels otntparallels(0);
  if (nt)
    otntparallels.readnt();
  else
    otntparallels.readot();
  // GUI: optional progress window, one step per section.
  progresswindow = NULL;
  if (gui) {
    progresswindow = new ProgressWindow(_("Producing passages"), true);
    progresswindow->set_iterate(0, 1, otntparallels.sections.size());
  }
  // Go through each section.
  for (unsigned int i = 0; i < otntparallels.sections.size(); i++) {
    if (gui) {
      progresswindow->iterate();
      // Bail out early when the user cancels; partially built data stands.
      if (progresswindow->cancel)
        return;
    }
    OtNtParallelDataSection datasection(0);
    // Section's heading.
    datasection.title = otntparallels.sections[i].title;
    // Go through each set of references.
    for (unsigned int i2 = 0; i2 < otntparallels.sections[i].sets.size(); i2++) {
      // Go through the references in the set.
      OtNtParallelDataSet dataset(0);
      for (unsigned int i3 = 0; i3 < otntparallels.sections[i].sets[i2].references.size(); i3++) {
        // Skip if NT book is not to be included.
        if (bookset.find(otntparallels.sections[i].sets[i2].references[i3].book) == bookset.end())
          continue;
        // Remap the original reference to this project's versification.
        vector < int >remapped_chapter;
        vector < int >remapped_verse;
        mapping.book_change(otntparallels.sections[i].sets[i2].references[i3].book);
        mapping.original_to_me(otntparallels.sections[i].sets[i2].references[i3].chapter, otntparallels.sections[i].sets[i2].references[i3].verse, remapped_chapter, remapped_verse);
        // NOTE(review): only the first remapped chapter/verse is used; any
        // further verses in a remapped range are dropped here.
        Reference mapped_reference(otntparallels.sections[i].sets[i2].references[i3].book, remapped_chapter[0], convert_to_string(remapped_verse[0]));
        ustring verse = mapped_reference.human_readable(language);
        if (includetext) {
          // Append the verse's text (markup stripped) after the reference.
          verse.append(" ");
          verse.append(usfm_get_verse_text_only(project_retrieve_verse(project, mapped_reference.book, mapped_reference.chapter, mapped_reference.verse)));
        }
        dataset.data.push_back(verse);
        // Record a parallel reference/comment pair for reporting.
        references.push_back(books_id_to_english(mapped_reference.book) + " " + convert_to_string(mapped_reference.chapter) + ":" + mapped_reference.verse);
        comments.push_back(_("Parallel"));
      }
      datasection.sets.push_back(dataset);
      //output verses of second project
      OtNtParallelDataSet dataset2(0);
      //TODO refactor this loop into a separate function
      if (project2!="") {
        // NOTE(review): language2 is computed but the human_readable() call
        // below still uses 'language' from the first project — verify whether
        // that is intentional.
        ustring language2 = settings->projectconfig(project2, false)->language_get();
        // Versification mapping for the second project.
        ustring versification2 = settings->projectconfig(project2, false)->versification_get();
        Mapping mapping2(versification2, 0);
        for (unsigned int i3 = 0; i3 < otntparallels.sections[i].sets[i2].references.size(); i3++) {
          // Skip if NT book is not to be included.
          if (bookset.find(otntparallels.sections[i].sets[i2].references[i3].book) == bookset.end())
            continue;
          // Remap to the second project's versification.
          vector < int >remapped_chapter;
          vector < int >remapped_verse;
          mapping2.book_change(otntparallels.sections[i].sets[i2].references[i3].book);
          mapping2.original_to_me(otntparallels.sections[i].sets[i2].references[i3].chapter, otntparallels.sections[i].sets[i2].references[i3].verse, remapped_chapter, remapped_verse);
          Reference mapped_reference(otntparallels.sections[i].sets[i2].references[i3].book, remapped_chapter[0], convert_to_string(remapped_verse[0]));
          ustring verse = mapped_reference.human_readable(language);
          if (includetext) {
            // Append the second project's verse text.
            verse.append(" ");
            verse.append(usfm_get_verse_text_only(project_retrieve_verse(project2, mapped_reference.book, mapped_reference.chapter, mapped_reference.verse)));
          }
          dataset2.data.push_back(verse);
          references.push_back(books_id_to_english(mapped_reference.book) + " " + convert_to_string(mapped_reference.chapter) + ":" + mapped_reference.verse);
          comments.push_back(_("Parallel"));
        }
        datasection.sets.push_back(dataset2);
      }
    }
    data.push_back(datasection);
  }
}
// Serializes a particle space into the given HDF5 group: a "particles"
// dataset, a "species" dataset, and scalar attributes "type", "t" and
// "edge_lengths". (The template<typename Tspace_> header is assumed to
// precede this definition.)
void save_particle_space(const Tspace_& space, H5::Group* root)
{
    typedef ParticleSpaceHDF5Traits traits_type;
    typedef typename traits_type::h5_species_struct h5_species_struct;
    typedef typename traits_type::h5_particle_struct h5_particle_struct;
    typedef std::vector<std::pair<ParticleID, Particle> > particle_container_type;

    const particle_container_type& particles(space.list_particles());
    const unsigned int num_particles(particles.size());

    // Species encountered so far, in first-seen order, plus a serial -> id
    // map so each species is stored only once.
    std::vector<Species> species;
    typedef utils::get_mapper_mf<Species::serial_type, unsigned int>::type species_id_map_type;
    species_id_map_type species_id_map;

    // Flat table of particle records for the HDF5 compound dataset.
    boost::scoped_array<h5_particle_struct> h5_particle_table(new h5_particle_struct[num_particles]);
    for (unsigned int i(0); i < num_particles; ++i)
    {
        species_id_map_type::const_iterator it(species_id_map.find(particles[i].second.species_serial()));
        if (it == species_id_map.end())
        {
            // First time this species is seen: register it. Ids are 1-based
            // (species.size() after push_back), matching the id column
            // written in the species table below.
            species.push_back(particles[i].second.species());
            it = species_id_map.insert(
                std::make_pair(particles[i].second.species_serial(),
                               species.size())).first;
        }
        h5_particle_table[i].lot = particles[i].first.lot();
        h5_particle_table[i].serial = particles[i].first.serial();
        h5_particle_table[i].sid = (*it).second;
        h5_particle_table[i].posx = particles[i].second.position()[0];
        h5_particle_table[i].posy = particles[i].second.position()[1];
        h5_particle_table[i].posz = particles[i].second.position()[2];
        h5_particle_table[i].radius = particles[i].second.radius();
        h5_particle_table[i].D = particles[i].second.D();
    }

    // Species table: 1-based id plus serial string.
    // NOTE(review): strcpy into the fixed-size serial field assumes the
    // serial fits the struct's buffer — verify against the traits definition.
    boost::scoped_array<h5_species_struct> h5_species_table(new h5_species_struct[species.size()]);
    for (unsigned int i(0); i < species.size(); ++i)
    {
        h5_species_table[i].id = i + 1;
        std::strcpy(h5_species_table[i].serial, species[i].serial().c_str());
    }

    // Write both 1-D compound datasets.
    const int RANK = 1;
    hsize_t dim1[] = {num_particles};
    H5::DataSpace dataspace1(RANK, dim1);
    boost::scoped_ptr<H5::DataSet> dataset1(new H5::DataSet(
        root->createDataSet(
            "particles", traits_type::get_particle_comp_type(), dataspace1)));
    hsize_t dim2[] = {species.size()};
    H5::DataSpace dataspace2(RANK, dim2);
    boost::scoped_ptr<H5::DataSet> dataset2(new H5::DataSet(
        root->createDataSet(
            "species", traits_type::get_species_comp_type(), dataspace2)));
    dataset1->write(h5_particle_table.get(), dataset1->getDataType());
    dataset2->write(h5_species_table.get(), dataset2->getDataType());

    // Scalar attribute: space type discriminator.
    const uint32_t space_type = static_cast<uint32_t>(Space::PARTICLE);
    H5::Attribute attr_space_type(
        root->createAttribute(
            "type", H5::PredType::STD_I32LE, H5::DataSpace(H5S_SCALAR)));
    attr_space_type.write(H5::PredType::STD_I32LE, &space_type);

    // Scalar attribute: simulation time.
    const double t = space.t();
    H5::Attribute attr_t(
        root->createAttribute(
            "t", H5::PredType::IEEE_F64LE, H5::DataSpace(H5S_SCALAR)));
    attr_t.write(H5::PredType::IEEE_F64LE, &t);

    // Scalar attribute holding a fixed 3-element array of edge lengths.
    const Real3 edge_lengths = space.edge_lengths();
    const hsize_t dims[] = {3};
    const H5::ArrayType lengths_type(H5::PredType::NATIVE_DOUBLE, 1, dims);
    H5::Attribute attr_lengths(
        root->createAttribute(
            "edge_lengths", lengths_type, H5::DataSpace(H5S_SCALAR)));
    double lengths[] = {edge_lengths[0], edge_lengths[1], edge_lengths[2]};
    attr_lengths.write(lengths_type, lengths);
}
// Collects Old/New Testament parallel passages for "project" (and, when
// "project2" is non-empty, a second project) into the member containers
// "data", "references" and "comments". This revision uses the Reference
// accessor methods (book_get() etc.) and handles verse ranges in the first
// project's loop.
//
// nt          - true: read NT parallels, false: read OT parallels.
// project     - project whose verses are looked up.
// books       - books to restrict the check to; empty means all books.
// includetext - when true, append the verse text after each reference.
// gui         - when true, show a cancellable progress window.
// project2    - optional second project whose parallel verses are emitted as
//               an extra set per section.
CheckParallelPassages::CheckParallelPassages(bool nt, const ustring & project, const vector < unsigned int >&books, bool includetext, bool gui,const ustring & project2)
{
  // Language of the first project, used to render human-readable references.
  extern Settings *settings;
  ustring language = settings->projectconfig(project, false)->language_get();
  // Versification mapping for the first project.
  ustring versification = settings->projectconfig(project, false)->versification_get();
  Mapping mapping(versification, 0);
  // Get a list of the books to check. If no books were given, take them all.
  vector < unsigned int >mybooks(books.begin(), books.end());
  if (mybooks.empty()) {
    mybooks = project_get_books(project);
  }
  // Set for fast membership tests while filtering references below.
  set < unsigned int >bookset(mybooks.begin(), mybooks.end());
  // Get the parallel passages.
  OtNtParallels otntparallels(0);
  if (nt) {
    otntparallels.readnt();
  } else {
    otntparallels.readot();
  }
  // GUI: optional progress window, one step per section.
  progresswindow = NULL;
  if (gui) {
    progresswindow = new ProgressWindow(_("Producing passages"), true);
    progresswindow->set_iterate(0, 1, otntparallels.sections.size());
  }
  // Go through each section.
  for (unsigned int i = 0; i < otntparallels.sections.size(); i++) {
    if (gui) {
      progresswindow->iterate();
      // Bail out early when the user cancels; partially built data stands.
      if (progresswindow->cancel)
        return;
    }
    OtNtParallelDataSection datasection(0);
    // Section's heading.
    datasection.title = otntparallels.sections[i].title;
    // Go through each set of references.
    for (unsigned int i2 = 0; i2 < otntparallels.sections[i].sets.size(); i2++) {
      // Go through the references in the set.
      OtNtParallelDataSet dataset(0);
      for (unsigned int i3 = 0; i3 < otntparallels.sections[i].sets[i2].references.size(); i3++) {
        // Skip if NT book is not to be included.
        if (bookset.find(otntparallels.sections[i].sets[i2].references[i3].book_get()) == bookset.end()) {
          continue;
        }
        // Remap the original reference to this project's versification.
        vector < int >remapped_chapter;
        vector < int >remapped_verse;
        mapping.book_change(otntparallels.sections[i].sets[i2].references[i3].book_get());
        mapping.original_to_me(otntparallels.sections[i].sets[i2].references[i3].chapter_get(), otntparallels.sections[i].sets[i2].references[i3].verse_get(), remapped_chapter, remapped_verse);
        ustring verse;
        // The verse can have a range, like Matthew 3:1-2, and we have to handle all verses in that range.
        // Prior to 5/23/2016, all that was used was remapped_verse[0], which destroyed any other verses in the range.
        for (unsigned int i4 = 0; i4 < remapped_verse.size(); i4++) {
          // NOTE(review): remapped_chapter[0] is used for every verse in the
          // range — a range spanning a chapter boundary would be mislabeled.
          Reference mapped_reference(otntparallels.sections[i].sets[i2].references[i3].book_get(), remapped_chapter[0], convert_to_string(remapped_verse[i4]));
          if (i4 > 0) {
            // For the second verse and beyond, we need to put an extra space and handle the verse number specially.
            verse.append(" [" + mapped_reference.verse_get() + "]");
          } else {
            verse.append(mapped_reference.human_readable(language));
          }
          if (includetext) {
            // Append the verse's text (markup stripped) after the reference.
            verse.append(" ");
            verse.append(usfm_get_verse_text_only(project_retrieve_verse(project, mapped_reference.book_get(), mapped_reference.chapter_get(), mapped_reference.verse_get())));
          }
        }
        dataset.data.push_back(verse);
        // The reported reference uses the original (unmapped) coordinates.
        // references.push_back(books_id_to_localname(mapped_reference.book_get()) + " " + convert_to_string(mapped_reference.chapter_get()) + ":" + mapped_reference.verse_get());
        references.push_back(books_id_to_localname(otntparallels.sections[i].sets[i2].references[i3].book_get()) + " " + convert_to_string(otntparallels.sections[i].sets[i2].references[i3].chapter_get()) + ":" + otntparallels.sections[i].sets[i2].references[i3].verse_get());
        comments.push_back(_("Parallel"));
      }
      datasection.sets.push_back(dataset);
      //output verses of second project
      OtNtParallelDataSet dataset2(0);
      //TODO refactor this loop into a separate function
      if (project2!="") {
        // NOTE(review): language2 is computed but the human_readable() call
        // below still uses 'language' from the first project — verify whether
        // that is intentional. Also note this loop, unlike the one above,
        // still uses only remapped_verse[0] (no range handling).
        ustring language2 = settings->projectconfig(project2, false)->language_get();
        // Versification mapping for the second project.
        ustring versification2 = settings->projectconfig(project2, false)->versification_get();
        Mapping mapping2(versification2, 0);
        for (unsigned int i3 = 0; i3 < otntparallels.sections[i].sets[i2].references.size(); i3++) {
          // Skip if NT book is not to be included.
          if (bookset.find(otntparallels.sections[i].sets[i2].references[i3].book_get()) == bookset.end())
            continue;
          // Remap to the second project's versification.
          vector < int >remapped_chapter;
          vector < int >remapped_verse;
          mapping2.book_change(otntparallels.sections[i].sets[i2].references[i3].book_get());
          mapping2.original_to_me(otntparallels.sections[i].sets[i2].references[i3].chapter_get(), otntparallels.sections[i].sets[i2].references[i3].verse_get(), remapped_chapter, remapped_verse);
          Reference mapped_reference(otntparallels.sections[i].sets[i2].references[i3].book_get(), remapped_chapter[0], convert_to_string(remapped_verse[0]));
          ustring verse = mapped_reference.human_readable(language);
          if (includetext) {
            // Append the second project's verse text.
            verse.append(" ");
            verse.append(usfm_get_verse_text_only(project_retrieve_verse(project2, mapped_reference.book_get(), mapped_reference.chapter_get(), mapped_reference.verse_get())));
          }
          dataset2.data.push_back(verse);
          references.push_back(books_id_to_localname(mapped_reference.book_get()) + " " + convert_to_string(mapped_reference.chapter_get()) + ":" + mapped_reference.verse_get());
          comments.push_back(_("Parallel"));
        }
        datasection.sets.push_back(dataset2);
      }
    }
    data.push_back(datasection);
  }
}
// Top-level frame showing a bar chart with two hard-coded sample series over
// the first seven months of the year.
WxBarFrame::WxBarFrame(const wxString& title) : wxFrame(NULL, wxID_ANY, title)
{
    // Top-level panel that holds all of the frame's contents.
    wxPanel* panel = new wxPanel(this, wxID_ANY);

    // Month names used as the chart's category labels.
    const char* const monthNames[] = {
        "January", "February", "March", "April", "May", "June", "July"
    };
    const size_t monthCount = sizeof(monthNames) / sizeof(monthNames[0]);

    wxVector<wxString> labels;
    for (size_t m = 0; m < monthCount; ++m)
        labels.push_back(monthNames[m]);
    wxBarChartData chartData(labels);

    // First sample series: light grey bars.
    const wxDouble seriesOne[] = { 3, 2.5, 1.2, 3, 6, 5, 1 };
    wxVector<wxDouble> points1;
    for (size_t p = 0; p < monthCount; ++p)
        points1.push_back(seriesOne[p]);
    wxBarChartDataset::ptr dataset1(new wxBarChartDataset(
        wxColor(220, 220, 220, 0x7F),
        wxColor(220, 220, 220, 0xCC),
        points1));
    chartData.AddDataset(dataset1);

    // Second sample series: light blue bars.
    const wxDouble seriesTwo[] = { 1, 1.33, 2.5, 2, 3, 1.8, 0.4 };
    wxVector<wxDouble> points2;
    for (size_t p = 0; p < monthCount; ++p)
        points2.push_back(seriesTwo[p]);
    wxBarChartDataset::ptr dataset2(new wxBarChartDataset(
        wxColor(151, 187, 205, 0x7F),
        wxColor(151, 187, 205, 0xFF),
        points2));
    chartData.AddDataset(dataset2);

    // The chart widget itself, filling the panel.
    wxBarChartCtrl* barChartCtrl = new wxBarChartCtrl(panel, wxID_ANY, chartData);

    // Sizer for the panel's contents.
    wxBoxSizer* panelSizer = new wxBoxSizer(wxHORIZONTAL);
    panelSizer->Add(barChartCtrl, 1, wxEXPAND);
    panel->SetSizer(panelSizer);

    // Sizer for the frame; fit the frame to the panel.
    wxBoxSizer* topSizer = new wxBoxSizer(wxHORIZONTAL);
    topSizer->Add(panel, 1, wxEXPAND);
    SetSizerAndFit(topSizer);
}