// Gets the raw USFM for the bible and passage given. string search_logic_get_bible_verse_usfm (string bible, int book, int chapter, int verse) { vector <string> texts; string path = search_logic_chapter_file (bible, book, chapter); string index = filter_url_file_get_contents (path); vector <string> lines = filter_string_explode (index, '\n'); int index_verse = 0; bool read_index_verse = false; int index_item = 0; for (auto & line : lines) { if (read_index_verse) { index_verse = convert_to_int (line); read_index_verse = false; } else if (line == search_logic_verse_separator ()) { read_index_verse = true; index_item = 0; } else if (line == search_logic_index_separator ()) { index_item++; } else if (index_item == USFM_RAW) { if (verse == index_verse) { texts.push_back (line); } } } return filter_string_implode (texts, "\n"); }
// Return the raw data of default versification system $name. string versification_logic_data (string name) { name = filter_string_str_replace (" ", "_", name); name.append (".txt"); string file = filter_url_create_root_path ("versification", name); return filter_url_file_get_contents (file); }
// If $command and $parameters are queued as a task, the function returns true. // Else it returns false. // It looks for an exact match. // Parameters left out are not checked. bool tasks_logic_queued (string command, vector <string> parameters) { // The lines to look for consist of the command followed by the parameters. vector <string> search (parameters); search.insert (search.begin (), command); // Go through all queued tasks. vector <string> files = filter_url_scandir (tasks_logic_folder ()); for (auto & file : files) { // Read the task's contents. string contents = filter_url_file_get_contents (filter_url_create_path (tasks_logic_folder (), file)); vector <string> lines = filter_string_explode (contents, '\n'); if (lines.empty ()) return false; // Look for a match. bool match = true; for (size_t i = 0; i < search.size (); i++) { if (i < lines.size ()) { if (search [i] != lines[i]) match = false; } else { match = false; } } if (match) return true; } // No match found. return false; }
// Reads the configuration value stored under $key.
// Returns $default_value when no value has been stored yet.
string Database_Config_General::getValue (const char * key, const char * default_value)
{
  string filename = file (key);
  // No stored value: hand back the caller's default.
  if (!file_or_dir_exists (filename)) return default_value;
  return filter_url_file_get_contents (filename);
}
// Return the network port configured for the server. string config_logic_http_network_port () { // Read the port number from file. string path = filter_url_create_root_path (config_logic_config_folder (), "network-port"); string port = filter_url_file_get_contents (path); // Remove white-space, e.g. a new line, that easily makes its way into the configuration file. port = filter_string_trim (port); // Default value. if (port.empty ()) port = "8080"; // Done. return port; }
// Renders a page listing the files under $url,
// or serves the raw file contents when $url points to a file.
string index_listing (void * webserver_request, string url)
{
  string page;
  page = Assets_Page::header ("Bibledit", webserver_request);
  // No breadcrumbs because the user can arrive here from more than one place.
  Assets_View view;
  // Normalize the URL: decode, root it, and use forward slashes throughout.
  url = filter_url_urldecode (url);
  url = filter_url_create_path ("", url);
  url = filter_string_str_replace ("\\", "/", url);
  view.set_variable ("url", url);
  string parent = filter_url_dirname_web (url);
  if (parent.length () > 1) {
    // There is a parent folder: enable the link to it in the template.
    view.enable_zone ("parent");
    view.set_variable ("parent", parent);
  }
  string directory = filter_url_create_root_path (url);
  if (!file_or_dir_exists (directory) || filter_url_is_dir (directory)) {
    vector <string> files = filter_url_scandir (directory);
    for (auto & file : files) {
      string path = filter_url_create_path (directory, file);
      // Build one HTML table row per entry: a link plus the file size.
      string line;
      line.append ("<tr>");
      line.append ("<td>");
      line.append ("<a href=\"" + filter_url_create_path (url, file) + "\">");
      line.append (file);
      line.append ("</a>");
      line.append ("</td>");
      line.append ("<td>");
      // Only files get a size; directories leave the cell empty.
      if (!filter_url_is_dir (path)) {
        line.append (convert_to_string (filter_url_filesize (path)));
      }
      line.append ("</td>");
      line.append ("</tr>");
      // Replace the entry in the vector with its rendered table row.
      file = line;
    }
    string listing = filter_string_implode (files, "\n");
    if (listing.empty ()) listing = translate ("No files in this folder");
    else {
      listing.insert (0, "<table>");
      listing.append ("</table>");
    }
    view.set_variable ("listing", listing);
  } else {
    // The URL points to an existing file: return its raw contents directly,
    // without the surrounding page chrome.
    string filename = filter_url_create_root_path (url);
    return filter_url_file_get_contents (filename);
  }
  page += view.render ("index", "listing");
  page += Assets_Page::footer ();
  return page;
}
// Gets journal entry more recent than "filename". // Updates "filename" to the item it got. string Database_Logs::getNext (string &filename) { string directory = folder (); vector <string> files = filter_url_scandir (directory); for (unsigned int i = 0; i < files.size (); i++) { string file = files [i]; if (file > filename) { filename = file; string path = filter_url_create_path (directory, file); string contents = filter_url_file_get_contents (path); return contents; } } return ""; }
// Compresses a $folder into gzipped tar format. // Returns the path to the compressed archive it created. string filter_archive_tar_gzip_folder (string folder) { string tarball = filter_url_tempfile () + ".tar.gz"; folder = filter_url_escape_shell_argument (folder); string logfile = filter_url_tempfile () + ".log"; string command = "cd " + folder + " && tar -czf " + tarball + " . > " + logfile + " 2>&1"; int return_var = system (command.c_str()); if (return_var != 0) { filter_url_unlink (tarball); tarball.clear(); string errors = filter_url_file_get_contents (logfile); Database_Logs::log (errors); } return tarball; }
// Prepares a sample Bible.
// The output of this is supposed to be manually put into the source tree, folder "samples".
// This will be used to quickly create a sample Bible, that is fast, even on mobile devices.
void demo_prepare_sample_bible ()
{
  Database_Bibles database_bibles;
  // Remove the Bible to remove all stuff that might have been in it.
  database_bibles.deleteBible (demo_sample_bible_name ());
  search_logic_delete_bible (demo_sample_bible_name ());
  // Create a new one.
  database_bibles.createBible (demo_sample_bible_name ());
  // Location of the USFM files for the sample Bible.
  string directory = filter_url_create_root_path ("demo");
  vector <string> files = filter_url_scandir (directory);
  for (auto file : files) {
    // Only process the USFM files.
    if (filter_url_get_extension (file) == "usfm") {
      cout << file << endl;
      // Read the USFM.
      file = filter_url_create_path (directory, file);
      string usfm = filter_url_file_get_contents (file);
      // NOTE(review): both arguments render as an ordinary space here;
      // this presumably replaced a non-breaking space (U+00A0) with a
      // regular space — verify this file's character encoding.
      usfm = filter_string_str_replace (" ", " ", usfm);
      // Import the USFM into the Bible.
      vector <BookChapterData> book_chapter_data = usfm_import (usfm, styles_logic_standard_sheet ());
      for (auto data : book_chapter_data) {
        Bible_Logic::storeChapter (demo_sample_bible_name (), data.book, data.chapter, data.data);
      }
    }
  }
  // Clean the destination location for the Bible.
  string destination = sample_bible_bible_path ();
  filter_url_rmdir (destination);
  // Copy the Bible data to the destination.
  string source = database_bibles.bibleFolder (demo_sample_bible_name ());
  filter_url_dir_cp (source, destination);
  // Clean the destination location for the Bible search index.
  destination = sample_bible_index_path ();
  filter_url_rmdir (destination);
  // Create destination location.
  filter_url_mkdir (destination);
  // Copy the index files over to the destination.
  source = search_logic_index_folder ();
  files = filter_url_scandir (source);
  for (auto file : files) {
    // Only copy index files whose name belongs to the sample Bible.
    if (file.find (demo_sample_bible_name ()) != string::npos) {
      string source_file = filter_url_create_path (source, file);
      string destination_file = filter_url_create_path (destination, file);
      filter_url_file_cp (source_file, destination_file);
    }
  }
}
// Compresses a $folder into zip format. // Returns the path to the compressed archive it created. string filter_archive_zip_folder (string folder) { if (!file_or_dir_exists (folder)) return ""; string zippedfile = filter_url_tempfile () + ".zip"; string logfile = filter_url_tempfile () + ".log"; folder = filter_url_escape_shell_argument (folder); string command = "cd " + folder + " && zip -r " + zippedfile + " * > " + logfile + " 2>&1"; int return_var = system (command.c_str()); if (return_var != 0) { filter_url_unlink (zippedfile); zippedfile.clear(); string errors = filter_url_file_get_contents (logfile); Database_Logs::log (errors); } return zippedfile; }
// Uncompresses a .tar.gz archive identified by $file. // Returns the path to the folder it created. string filter_archive_untar_gzip (string file) { file = filter_url_escape_shell_argument (file); string folder = filter_url_tempfile (); filter_url_mkdir (folder); folder.append (DIRECTORY_SEPARATOR); string logfile = filter_url_tempfile () + ".log"; string command = "cd " + folder + " && tar zxf " + file + " > " + logfile + " 2>&1"; int return_var = system (command.c_str()); if (return_var != 0) { filter_url_rmdir (folder); folder.clear(); string errors = filter_url_file_get_contents (logfile); Database_Logs::log (errors); } return folder; }
// This returns the filtered value of file userfacingurl.conf. string config_logic_manual_user_facing_url () { #ifdef HAVE_CLIENT return ""; #else // Read the configuration file. string path = filter_url_create_root_path (config_logic_config_folder (), "userfacingurl.conf"); string url = filter_url_file_get_contents (path); // Remove white space. url = filter_string_trim (url); // The previous file contained dummy text by default. Remove that. if (url.length () <= 6) url.clear (); // Ensure it ends with a slash. if (url.find_last_of ("/") != url.length () - 1) url.append ("/"); // Done. return url; #endif }
// Searches the text of the Bibles. // Returns an array with matching passages. // $search: Contains the text to search for. // $bibles: Array of Bible names to search in. vector <Passage> search_logic_search_text (string search, vector <string> bibles) { vector <Passage> passages; if (search == "") return passages; search = unicode_string_casefold (search); search = filter_string_str_replace (",", "", search); Database_Bibles database_bibles; for (auto bible : bibles) { vector <int> books = database_bibles.getBooks (bible); for (auto book : books) { vector <int> chapters = database_bibles.getChapters (bible, book); for (auto chapter : chapters) { string path = search_logic_chapter_file (bible, book, chapter); string index = filter_url_file_get_contents (path); if (index.find (search) != string::npos) { vector <string> lines = filter_string_explode (index, '\n'); int index_verse = 0; bool read_index_verse = false; int index_item = 0; for (auto & line : lines) { if (read_index_verse) { index_verse = convert_to_int (line); read_index_verse = false; } else if (line == search_logic_verse_separator ()) { read_index_verse = true; index_item = 0; } else if (line == search_logic_index_separator ()) { index_item++; } else if (index_item == PLAIN_LOWER) { if (line.find (search) != string::npos) { passages.push_back (Passage (bible, book, chapter, convert_to_string (index_verse))); } } } } } } } return passages; }
// This function serves a file and enables caching by the browser. void http_serve_cache_file (Webserver_Request * request) { // Full path to the file. string filename = filter_url_create_root_path (filter_url_urldecode (request->get)); // File size for browser caching. int size = filter_url_filesize (filename); request->etag = "\"" + convert_to_string (size) + "\""; // Deal with situation that the file in the browser's cache is up to date. // https://developers.google.com/web/fundamentals/performance/optimizing-content-efficiency/http-caching if (request->etag == request->if_none_match) { request->response_code = 304; return; } // Get file's contents. request->reply = filter_url_file_get_contents (filename); }
// Get the logbook entries. vector <string> Database_Logs::get (string & lastfilename) { lastfilename = "0"; // Read entries from the filesystem. vector <string> entries; string directory = folder (); vector <string> files = filter_url_scandir (directory); for (unsigned int i = 0; i < files.size(); i++) { string file = files [i]; string path = filter_url_create_path (directory, file); string contents = filter_url_file_get_contents (path); entries.push_back (contents); // Last second gets updated based on the filename. lastfilename = file; } // Done. return entries; }
// Returns the total verse count within a $bible. int search_logic_get_verse_count (string bible) { int verse_count = 0; Database_Bibles database_bibles; vector <int> books = database_bibles.getBooks (bible); for (auto book : books) { vector <int> chapters = database_bibles.getChapters (bible, book); for (auto chapter : chapters) { string path = search_logic_chapter_file (bible, book, chapter); string index = filter_url_file_get_contents (path); vector <string> lines = filter_string_explode (index, '\n'); for (auto & line : lines) { if (line == search_logic_verse_separator ()) { verse_count++; } } } } return verse_count; }
// Imports Bible data from $location into $bible at $book / $chapter.
// $location may be a single file, a directory, or a compressed archive.
void bible_import_run (string location, string bible, int book, int chapter)
{
  Database_Logs::log ("Importing Bible data from location " + location + " into Bible " + bible);
  // If the location is a compressed archive, uncompress it first
  // and import from the resulting folder.
  string folder = filter_archive_uncompress (location);
  if (!folder.empty ()) location = folder;
  // Collect the files to import: all files under a directory, or the single file.
  vector <string> files;
  if (filter_url_is_dir (location)) {
    filter_url_recursive_scandir (location, files);
  } else {
    files.push_back (location);
  }
  for (auto & file : files) {
    if (filter_url_is_dir (file)) continue;
    Database_Logs::log ("Examining file for import: " + file);
    // NOTE(review): these two variables are never used below.
    string success_message = "";
    string error_message = "";
    string data = filter_url_file_get_contents (file);
    if (data != "") {
      if (unicode_string_is_valid (data)) {
        // Check whether this is USFM data: the \id or \c marker is present.
        bool id = data.find ("\\id ") != string::npos;
        bool c = data.find ("\\c ") != string::npos;
        if (id || c) {
          bible_import_usfm (data, bible, book, chapter);
        } else {
          // Not USFM: import the data as plain text.
          bible_import_text (data, bible, book, chapter);
        }
      } else {
        Database_Logs::log ("The file does not contain valid Unicode UTF-8 text.", true);
      }
    } else {
      Database_Logs::log ("Nothing was imported.", true);
    }
  }
  Database_Logs::log ("Import Bible data has finished");
}
// Performs a case-sensitive search of the USFM of one $bible. // Returns an array with the rowid's of matching verses. // $search: Contains the text to search for. vector <Passage> search_logic_search_bible_usfm_case_sensitive (string bible, string search) { vector <Passage> passages; if (search == "") return passages; Database_Bibles database_bibles; vector <int> books = database_bibles.getBooks (bible); for (auto book : books) { vector <int> chapters = database_bibles.getChapters (bible, book); for (auto chapter : chapters) { string path = search_logic_chapter_file (bible, book, chapter); string index = filter_url_file_get_contents (path); if (index.find (search) != string::npos) { vector <string> lines = filter_string_explode (index, '\n'); int index_verse = 0; bool read_index_verse = false; int index_item = 0; for (auto & line : lines) { if (read_index_verse) { index_verse = convert_to_int (line); read_index_verse = false; } else if (line == search_logic_verse_separator ()) { read_index_verse = true; index_item = 0; } else if (line == search_logic_index_separator ()) { index_item++; } else if (index_item == USFM_RAW) { if (line.find (search) != string::npos) { passages.push_back (Passage (bible, book, chapter, convert_to_string (index_verse))); } } } } } } return passages; }
// Uncompresses a zip archive identified by $file. // Returns the path to the folder it created. string filter_archive_unzip (string file) { string folder = filter_url_tempfile (); filter_url_mkdir (folder); folder.append (DIRECTORY_SEPARATOR); string logfile = filter_url_tempfile () + ".log"; file = filter_url_escape_shell_argument (file); string command = "unzip -o -d " + folder + " " + file + " > " + logfile + " 2>&1"; int return_var = system (command.c_str()); if (return_var != 0) { filter_url_rmdir (folder); folder.clear(); string errors = filter_url_file_get_contents (logfile); Database_Logs::log (errors); } else { // Set free permissions after unzipping. command = "chmod -R 0777 " + folder; int result = system (command.c_str ()); (void) result; } return folder; }
// Returns the assignees stored for $user, one entry per line on disk.
vector <string> Database_NoteAssignment::assignees (string user)
{
  string data = filter_url_file_get_contents (path (user));
  return filter_string_explode (data, '\n');
}
// Get available SWORD modules. vector <string> sword_logic_get_available () { string contents = filter_url_file_get_contents (sword_logic_module_list_path ()); return filter_string_explode (contents, '\n'); }
// Rotates the journal: removes the legacy journal database, caps the number
// of journal entry files, expires old entries, and removes filtered entries.
void Database_Logs::rotate ()
{
  // Remove the database that was used in older versions of Bibledit.
  // Since February 2016 Bibledit no longer uses a database for storing the journal.
  // Reasons that a database is no longer used:
  // 1. Simpler system.
  // 2. Android has VACUUM errors due to a locked database.
  string old_database_file = database_sqlite_file ("logs2");
  if (file_exists (old_database_file)) {
    filter_url_unlink (old_database_file);
  }
  // Use a mechanism that handles huge amounts of entries.
  // The PHP function scandir choked on this or took a very long time.
  // The PHP functions opendir / readdir / closedir handled it better.
  // But now, in C++, with the new journal mechanism, this is no longer relevant.
  string directory = folder ();
  vector <string> files = filter_url_scandir (directory);
  // Timestamp for removing older records, depending on whether it's a tiny journal.
#ifdef HAVE_TINYJOURNAL
  int oldtimestamp = filter_date_seconds_since_epoch () - (14400);
#else
  int oldtimestamp = filter_date_seconds_since_epoch () - (6 * 86400);
#endif
  // Limit the journal entry count available in the filesystem.
  // This speeds up subsequent reading of the Journal by the users.
  // In previous versions of Bibledit, there were certain conditions
  // that led to an infinite loop, as had been noticed at times,
  // and this quickly exhausted the available inodes on the filesystem.
  // NOTE(review): files.size () is unsigned, so when fewer files exist than
  // the limit, the subtraction wraps before truncation to int. On common
  // platforms the truncation yields the intended negative value — verify.
#ifdef HAVE_TINYJOURNAL
  int limitfilecount = files.size () - 200;
#else
  int limitfilecount = files.size () - 2000;
#endif
  bool filtered_entries = false;
  for (unsigned int i = 0; i < files.size(); i++) {
    string path = filter_url_create_path (directory, files [i]);
    // Limit the number of journal entries: unlink the oldest surplus files.
    if ((int)i < limitfilecount) {
      filter_url_unlink (path);
      continue;
    }
    // Remove expired entries. The first ten characters of the filename
    // carry the entry's seconds-since-epoch timestamp.
    int timestamp = convert_to_int (files [i].substr (0, 10));
    if (timestamp < oldtimestamp) {
      filter_url_unlink (path);
      continue;
    }
    // Filtering of certain entries.
    string entry = filter_url_file_get_contents (path);
    if (journal_logic_filter_entry (entry)) {
      filtered_entries = true;
      filter_url_unlink (path);
      continue;
    }
  }
  // Record in the journal that entries were filtered out.
  if (filtered_entries) {
    log (journal_logic_filtered_message ());
  }
}
// The configured admin's email. string config_logic_admin_email () { string path = filter_url_create_root_path (config_logic_config_folder (), "admin-email"); return filter_string_trim (filter_url_file_get_contents (path)); }
// Entry point of the Bibledit server.
// Installs signal handlers, derives the document root from the executable's
// own location, starts the Bibledit library, and runs until shutdown.
int main (int argc, char **argv)
{
  // Reference the parameters so the compiler does not warn about them.
  if (argc) {};
  if (argv[0]) {};
  // Ctrl-C initiates a clean shutdown sequence, so there's no memory leak.
  signal (SIGINT, sigint_handler);
#ifdef HAVE_EXECINFO
  // Handler for logging segmentation fault.
  signal (SIGSEGV, sigsegv_handler);
#endif
  // Get the executable path and base the document root on it.
  string webroot;
  {
    // The following works on Linux but not on Mac OS X:
    char *linkname = (char *) malloc (256);
    // Zero the buffer: valgrind uninitialized value(s).
    memset (linkname, 0, 256);
    ssize_t r = readlink ("/proc/self/exe", linkname, 256);
    // Reference the result so the compiler does not warn about it.
    if (r) {};
    webroot = filter_url_dirname (linkname);
    free (linkname);
  }
  {
#ifdef HAVE_LIBPROC
    // The following works on Linux plus on Mac OS X:
    int ret;
    pid_t pid;
    char pathbuf [2048];
    pid = getpid ();
    ret = proc_pidpath (pid, pathbuf, sizeof (pathbuf));
    if (ret > 0 ) {
      webroot = filter_url_dirname (pathbuf);
    }
#endif
  }
  // Pass the web root as both the package and the document root.
  bibledit_initialize_library (webroot.c_str(), webroot.c_str());
  // Start the Bibledit library.
  bibledit_start_library ();
  bibledit_log ("The server started");
  cout << "Listening on http://localhost:" << config_logic_http_network_port () << " and https://localhost:" << config_logic_https_network_port () << endl;
  cout << "Press Ctrl-C to quit" << endl;
  // Log possible backtrace from a previous crash, then clear it from disk.
  string backtrace = filter_url_file_get_contents (backtrace_path ());
  filter_url_unlink (backtrace_path ());
  if (!backtrace.empty ()) {
    Database_Logs::log ("Backtrace of the last segmentation fault:");
    vector <string> lines = filter_string_explode (backtrace, '\n');
    for (auto & line : lines) {
      Database_Logs::log (line);
    }
  }
  // Bibledit Cloud should restart itself at midnight.
  // This is to be sure that any memory leaks don't accumulate too much
  // in case Bibledit Cloud runs for months and years.
  bibledit_set_quit_at_midnight ();
  // Keep running till Bibledit stops or gets interrupted.
  while (bibledit_is_running ()) { };
  bibledit_shutdown_library ();
  return EXIT_SUCCESS;
}
// Handles a client's files-synchronization request.
// Dispatches on the requested action:
// total / directory / file checksums, directory listings, or file download.
string sync_files (void * webserver_request)
{
  Webserver_Request * request = (Webserver_Request *) webserver_request;
  Sync_Logic sync_logic = Sync_Logic (webserver_request);
  if (!sync_logic.security_okay ()) {
    // When the Cloud enforces https, inform the client to upgrade.
    request->response_code = 426;
    return "";
  }
  // If the client's IP address very recently made a prioritized server call,
  // then delay the current call.
  // This is the way to give priority to the other call:
  // Not clogging the client's internet connection.
  if (sync_logic.prioritized_ip_address_active ()) {
    this_thread::sleep_for (chrono::seconds (5));
  }
  // Take the parameters from the POST body, falling back to the query string.
  if (request->post.empty ()) {
    request->post = request->query;
  }
  string user = hex2bin (request->post ["u"]);
  int action = convert_to_int (request->post ["a"]);
  int version = convert_to_int (request->post ["v"]);
  size_t d = convert_to_int (request->post ["d"]);
  string file = request->post ["f"];
  // For security reasons a client does not specify the directory of the file to be downloaded.
  // Rather it specifies the offset within the list of allowed directories for the version.
  vector <string> directories = Sync_Logic::files_get_directories (version, user);
  if (d >= directories.size ()) {
    // Directory offset out of range: bad request.
    request->response_code = 400;
    return "";
  }
  string directory = directories [d];
  if (action == Sync_Logic::files_total_checksum) {
    return convert_to_string (Sync_Logic::files_get_total_checksum (version, user));
  }
  else if (action == Sync_Logic::files_directory_checksum) {
    int checksum = Sync_Logic::files_get_directory_checksum (directory);
    return convert_to_string (checksum);
  }
  else if (action == Sync_Logic::files_directory_files) {
    vector <string> paths = Sync_Logic::files_get_files (directory);
    return filter_string_implode (paths, "\n");
  }
  else if (action == Sync_Logic::files_file_checksum) {
    int checksum = Sync_Logic::files_get_file_checksum (directory, file);
    return convert_to_string (checksum);
  }
  else if (action == Sync_Logic::files_file_download) {
    // This triggers the correct mime type.
    request->get = "file.download";
    // Return the file's contents.
    string path = filter_url_create_root_path (directory, file);
    return filter_url_file_get_contents (path);
  }
  // Bad request. Delay flood of bad requests.
  this_thread::sleep_for (chrono::seconds (1));
  request->response_code = 400;
  return "";
}
// Returns the stored USFM resources list, one entry per line on disk.
vector <string> client_logic_usfm_resources_get ()
{
  string stored = filter_url_file_get_contents (client_logic_usfm_resources_path ());
  return filter_string_explode (stored, '\n');
}