bool seek_time(const stdString &index_name, const stdString &channel_name, const stdString &start_txt) { epicsTime start; if (!string2epicsTime(start_txt, start)) { fprintf(stderr, "Cannot convert '%s' to time stamp\n", start_txt.c_str()); return false; } IndexFile index(3); index.open(index_name, true); stdString directory; AutoPtr<RTree> tree(index.getTree(channel_name, directory)); if (tree) { RTree::Datablock block; RTree::Node node(tree->getM(), true); int idx; if (tree->searchDatablock(start, node, idx, block)) { stdString s, e; printf("Found block %s - %s\n", epicsTimeTxt(node.record[idx].start, s), epicsTimeTxt(node.record[idx].end, e)); } else printf("Nothing found\n"); } else fprintf(stderr, "Cannot find channel '%s'\n", channel_name.c_str()); return true; }
// Update rows of 'sTable' matching 'sWhere' with the values in 'mData'.
// Returns 0 when there is nothing to update; otherwise forwards the
// result of the underlying CSQLite::Update() call.
int CSqSQLite::Update( const stdString &sTable, const stdString &sWhere, CSqMulti &mData )
{_STT();
    // No data -> nothing to do
    if ( 0 == mData.size() )
        return 0;
    // Convert the multi-map into a property bag the SQL layer understands
    oex::CPropertyBag pb;
    SQBIND_MultiToPropertyBag( mData.list(), pb );
    return CSQLite::Update( sTable.c_str(), sWhere.c_str(), pb );
}
// Iterate over names in index, fetch those // that match the pattern. void get_names_for_pattern(Index &index, stdVector<stdString> &names, const stdString &pattern) { if (verbose) printf("Expanding pattern '%s'\n", pattern.c_str()); try { AutoPtr<RegularExpression> regex; if (pattern.length() > 0) regex.assign(new RegularExpression(pattern.c_str())); Index::NameIterator name_iter; if (!index.getFirstChannel(name_iter)) return; // No names // Put all names in binary tree BinaryTree<stdString> channels; do { if (regex && !regex->doesMatch(name_iter.getName())) continue; // skip what doesn't match regex channels.add(name_iter.getName()); } while (index.getNextChannel(name_iter)); // Sorted dump of names channels.traverse(add_name2vector, (void *)&names); } catch (GenericException &e) { throw GenericException(__FILE__, __LINE__, "Error expanding name pattern '%s':\n%s\n", pattern.c_str(), e.what()); } }
void DataWriter::makeDataFileName(int serial, stdString &name) { int len; char buffer[30]; if (data_file_name_base.length() > 0) { name = data_file_name_base; if (serial > 0) { len = snprintf(buffer, sizeof(buffer), "-%d", serial); if (len >= (int)sizeof(buffer)) len = sizeof(buffer)-1; name.append(buffer, len); } return; } // Else: Create name based on "<today>[-serial]" int year, month, day, hour, min, sec; unsigned long nano; epicsTime now = epicsTime::getCurrent(); epicsTime2vals(now, year, month, day, hour, min, sec, nano); if (serial > 0) len = snprintf(buffer, sizeof(buffer), "%04d%02d%02d-%d", year, month, day, serial); else len = snprintf(buffer,sizeof(buffer), "%04d%02d%02d", year, month, day); if (len >= (int)sizeof(buffer)) len = sizeof(buffer)-1; name.assign(buffer, len); }
// CGI-unescape 'text' in place by running the char* unescape()
// over a modifiable copy (including the terminating '\0').
void CGIDemangler::unescape(stdString &text)
{
    const size_t bytes = text.length() + 1; // incl. '\0'
    MemoryBuffer<char> scratch(bytes);
    memcpy(scratch.mem(), text.c_str(), bytes);
    unescape(scratch.mem());
    text = scratch.mem();
}
// Copy samples from archive with index_name // to new index copy_name. // Uses all samples in source archive or [start ... end[ . void copy(const stdString &index_name, const stdString ©_name, int RTreeM, const epicsTime *start, const epicsTime *end, const stdString &single_name) { IndexFile index(RTreeM), new_index(RTreeM); IndexFile::NameIterator names; size_t channel_count = 0, value_count = 0, back_count = 0; BenchTimer timer; stdString dir1, dir2; Filename::getDirname(index_name, dir1); Filename::getDirname(copy_name, dir2); if (dir1 == dir2) { printf("You have to assert that the new index (%s)\n" "is in a directory different from the old index\n" "(%s)\n", copy_name.c_str(), index_name.c_str()); return; } index.open(index_name, true); new_index.open(copy_name, false); if (verbose) printf("Copying values from '%s' to '%s'\n", index_name.c_str(), copy_name.c_str()); RawDataReader reader(index); if (single_name.empty()) { bool ok = index.getFirstChannel(names); while (ok) { copy_channel(names.getName(), start, end, index, reader, new_index, channel_count, value_count, back_count); ok = index.getNextChannel(names); } } else copy_channel(single_name, start, end, index, reader, new_index, channel_count, value_count, back_count); new_index.close(); index.close(); timer.stop(); if (verbose) { printf("Total: %lu channels, %lu values\n", (unsigned long) channel_count, (unsigned long) value_count); printf("Skipped %lu back-in-time values\n", (unsigned long) back_count); printf("Runtime: %s\n", timer.toString().c_str()); } }
// Write the RTree of 'channel_name' (from index 'index_name')
// as a DOT graph into file 'dot_name'.
// (Fix: 'channel_name' used to be passed by value — a needless
//  string copy, inconsistent with every sibling function.)
void dot_index(const stdString &index_name, const stdString &channel_name,
               const stdString &dot_name)
{
    IndexFile index(3);
    index.open(index_name, true);
    stdString directory;
    AutoPtr<RTree> tree(index.getTree(channel_name, directory));
    if (!tree)
    {
        fprintf(stderr, "Cannot find '%s' in index '%s'.\n",
                channel_name.c_str(), index_name.c_str());
        return;
    }
    tree->makeDot(dot_name.c_str());
}
// Construct a channel group of the given name.
// Counters start at zero: no channel connected yet,
// and nothing disables the group yet.
GroupInfo::GroupInfo(const stdString &name)
    : NamedBase(name.c_str()),
      // Mutex tagged "GroupInfo" with the EngineLocks::GroupInfo id
      // (presumably a lock-ordering constant — confirm in EngineLocks)
      mutex("GroupInfo", EngineLocks::GroupInfo),
      num_connected(0),
      disable_count(0)
{
}
// Remove name from directory file. // Will not remove data but only "pointers" to the data! bool OldDirectoryFile::remove(const stdString &name) { OldDirectoryFileIterator i(this); HashTable::HashValue hash = HashTable::Hash(name.c_str()); FileOffset prev=0, offset = readHTEntry(hash); // Follow the channel chain that hashes to this value: while (offset != INVALID_OFFSET) { i.entry.read(_file, offset); if (name == i.entry.data.name) { // unlink this entry from list of names that share 'hash' if (prev == 0) // first entry in list? { // Make hash table point to the next channel, // skipping this one writeHTEntry(hash, i.entry.data.next_entry_offset); return true; } else { // Make previous entry skip this one offset = i.entry.data.next_entry_offset; i.entry.read(_file, prev); i.entry.data.next_entry_offset = offset; i.entry.write(_file, prev); return true; } } prev = offset; offset = i.entry.data.next_entry_offset; } return false; }
bool ArchiveDataClient::getNames(int key, const stdString &pattern, stdVector<NameInfo> &names) { xmlrpc_value *result, *element; const char *name; xmlrpc_int32 start_sec, start_nano, end_sec, end_nano; size_t count, i, len; NameInfo info; result = xmlrpc_client_call(&env, (char *)URL, "archiver.names", "(is)", (xmlrpc_int32) key, pattern.c_str()); if (log_fault()) return false; count = xmlrpc_array_size(&env, result); names.reserve(count); for (i=0; i<count; ++i) { element = xmlrpc_array_get_item(&env, result, i); if (log_fault()) return false; xmlrpc_parse_value(&env, element, "{s:s#,s:i,s:i,s:i,s:i,*}", "name", &name, &len, "start_sec", &start_sec, "start_nano", &start_nano, "end_sec", &end_sec, "end_nano", &end_nano); if (log_fault()) return false; info.name.assign(name, len); pieces2epicsTime(start_sec, start_nano, info.start); pieces2epicsTime(end_sec, end_nano, info.end); names.push_back(info); } xmlrpc_DECREF(result); return true; }
// Visitor for BinaryTree of channel names; // see get_names_for_pattern(). static void add_name2vector(const stdString &name, void *arg) { stdVector<stdString> *names = (stdVector<stdString> *)arg; if (verbose) printf("%s\n", name.c_str()); names->push_back(name); }
// Build an ArchiveException whose message combines the human-readable
// text for 'code' (error_text lookup) with the caller's detail string,
// and remember the code itself.
ArchiveException::ArchiveException(const char *sourcefile, size_t line,
                                   Code code, const stdString &detail)
    : GenericException(sourcefile, line,
                       "Archive Exception: %s,\n%s\n",
                       error_text[code], detail.c_str()),
      code(code)
{}
// Save the current image to file 'sFile'.
// Returns 0 when there is no image or no file name;
// otherwise forwards the result of oss::ezd_save().
int CSqEzdib::Save( const stdString &sFile )
{_STT();
    if ( !m_ezimg )
        return 0; // no image to save
    if ( !sFile.length() )
        return 0; // no file name given
    return oss::ezd_save( m_ezimg,
                          oexStrToMb( sqbind::std2oex( sFile ) ).c_str() );
}
// Open (or, when 'for_write', create) the directory file 'filename'.
// A fresh file gets its hash table initialized; an existing file
// must already contain one. Returns false on open failure or
// when a read-only file lacks the hash table.
bool OldDirectoryFile::open(const stdString &filename, bool for_write)
{
    _filename = filename;
    Filename::getDirname(_filename, _dirname);
    _file_for_write = for_write;
    // Try to open an existing file for update first;
    // only create a new one when writing was requested.
    _file = fopen(filename.c_str(), "r+b");
    if (_file==0 && for_write)
        _file = fopen(filename.c_str(), "w+b");
    if (_file == 0)
        return false;
    // Does file contain HT? (file size tells us)
    fseek(_file, 0, SEEK_END);
    _next_free_entry = ftell(_file);
    if (_next_free_entry < FirstEntryOffset)
    {
        if (!for_write) // ... but it should
        {
            LOG_MSG("OldDirectoryFile::open(%s): Missing HT\n",
                    filename.c_str());
            return false;
        }
        // Initialize HT: every slot empty
        for (HashTable::HashValue entry = 0;
             entry < HashTable::HashTableSize;
             ++entry)
            writeHTEntry(entry, INVALID_OFFSET);
        // Next free entry = first entry after HT
        _next_free_entry = FirstEntryOffset;
    }
    // Check if file size = HT + N full entries
    FileOffset rest = (_next_free_entry - FirstEntryOffset)
        % OldDirectoryFileEntry::DataSize;
    if (rest)
        LOG_MSG("Suspicious directory file %s has a 'tail' of %d Bytes\n",
                filename.c_str(), rest);
#ifdef LOG_DIRFILE
    if (_file.isReadonly())
        LOG_MSG("(readonly) ");
    LOG_MSG("OldDirectoryFile %s\n", _filename);
#endif
    return true;
}
// Dump values of 'channel_name' from 'archive_name' as ASCII to stdout,
// starting at 'start' and stopping before 'end' (if 'end' is valid).
// Emits a new header whenever the scan period changes and new control
// info whenever that changes; flags samples that go back in time.
void output_ascii(const stdString &archive_name,
                  const stdString &channel_name,
                  const epicsTime &start, const epicsTime &end)
{
    Archive archive(new ARCHIVE_TYPE(archive_name));
    ChannelIterator channel(archive);
    ValueIterator value(archive);
    if (! archive.findChannelByName(channel_name, channel))
    {
        printf("# Channel not found: %s\n", channel_name.c_str());
        return;
    }
    printf("channel=%s\n", channel_name.c_str());
    // Position the value iterator on the first sample at/after 'start'
    if (! channel->getValueAfterTime (start, value))
    {
        printf("# no values\n");
        return;
    }
    CtrlInfo info;
    double period=-1;          // force header output on first sample
    epicsTime last_time = nullTime;
    while (value && (!isValidTime(end) || value->getTime() < end))
    {
        // Scan period changed -> new header
        if (period != value.getPeriod())
        {
            period = value.getPeriod();
            output_header(value);
        }
        // Control info (limits, units, ...) changed -> dump it
        if (info != *value->getCtrlInfo())
        {
            info = *value->getCtrlInfo();
            output_info(&info);
        }
        // Sanity check: time stamps should be monotonic
        if (isValidTime(last_time) && value->getTime() < last_time)
            printf("Error: back in time:\n");
        value->show(stdout);
        fputs("\n", stdout);
        last_time = value->getTime();
        ++value;
    }
}
// Parse ASCII samples from 'file_name' and import them
// into (possibly new) archive 'archive_name'.
// (Fix: a failed parser.open() printed an error but then fell
//  through and ran the parser anyway; now it returns.)
void input_ascii(const stdString &archive_name, const stdString &file_name)
{
    ArchiveParser parser;
    if (! parser.open(file_name))
    {
        printf("Cannot open '%s'\n", file_name.c_str());
        return;
    }
    Archive archive(new BinArchive(archive_name, true));
    parser.run(archive);
}
size_t DataFile::getHeaderSize(const stdString &name, DbrType dbr_type, DbrCount dbr_count, size_t num_samples) { size_t raw_value_size = RawValue::getSize(dbr_type, dbr_count); size_t buf_free = num_samples * raw_value_size; // 'INFO' + name + '\0' + header info + data buffer return 4 + name.length() + 1 + sizeof(DataHeader::DataHeaderData) + buf_free; }
// Attach DiskBasedHashTable to disk file of given name. // a) new file: setup Hash Table // b) existing file for read-only: check HT // c) existing file for read-write: check HT DirectoryFile::DirectoryFile(const stdString &filename, bool for_write) { _filename = filename; Filename::getDirname(_filename, _dirname); _file_for_write = for_write; _file = fopen(filename.c_str(), "r+b"); if (_file==0 && for_write) _file = fopen(filename.c_str(), "w+b"); if (_file == 0) throwDetailedArchiveException(OpenError, filename); // Does file contain HT? fseek(_file, 0, SEEK_END); _next_free_entry = ftell(_file); if (_next_free_entry < FirstEntryOffset) { if (!for_write) // ... but it should throwDetailedArchiveException(Invalid, "Missing HT"); // Initialize HT: for (HashTable::HashValue entry = 0; entry < HashTable::HashTableSize; ++entry) writeHTEntry(entry, INVALID_OFFSET); // Next free entry = first entry after HT _next_free_entry = FirstEntryOffset; } // Check if file size = HT + N full entries FileOffset rest = (_next_free_entry - FirstEntryOffset) % BinChannel::getDataSize(); if (rest) LOG_MSG("Suspicious directory file %s has a 'tail' of %d Bytes\n", filename.c_str(), rest); #ifdef LOG_DIRFILE if (_file.isReadonly()) LOG_MSG("(readonly) "); LOG_MSG("DirectoryFile %s\n", _filename); #endif }
// Convert 'time' into text.
// With 'only_millisecs' set, the time stamp is rounded to
// millisecond resolution and truncated after the milliseconds.
// (Fix: rounding nsec >= 999500000 used to yield nsec == 1000000000,
//  an invalid time stamp; now the overflow carries into the seconds.)
static void format_time(const epicsTime &time, stdString &text)
{
    if (only_millisecs)
    {
        epicsTimeStamp stamp = time;
        // Round nanoseconds to full milliseconds
        stamp.nsec = ((stamp.nsec + 500000) / 1000000) * 1000000;
        if (stamp.nsec >= 1000000000)
        {   // Carry into the next second
            stamp.nsec -= 1000000000;
            ++stamp.secPastEpoch;
        }
        epicsTime2string(epicsTime(stamp), text);
        // Keep only the first 23 characters (down to the milliseconds)
        text = text.substr(0, 23);
        return;
    }
    epicsTime2string(time, text);
}
// Get the text for enumerated state 'state' into 'result'.
// Unknown states yield "<Undef: N>".
// (Fix: sprintf -> snprintf so the fixed-size buffer is provably safe.)
void CtrlInfo::getState(size_t state, stdString &result) const
{
    size_t len;
    const char *text = getState(state, len);
    if (text)
    {
        result.assign(text, len);
        return;
    }
    char buffer[80];
    snprintf(buffer, sizeof(buffer), "<Undef: %u>", (unsigned int)state);
    result = buffer;
}
// Construct a DataFile handle for 'filename'
// ('dirname' and 'basename' are kept alongside the full name).
// Starts with a reference count of 1; 'for_write' selects write mode.
DataFile::DataFile(const stdString &dirname,
                   const stdString &basename,
                   const stdString &filename, bool for_write)
    : ref_count(1),
      for_write(for_write),
      is_tagged_file(false), // until proven otherwise
      filename(filename),
      dirname(dirname),
      basename(basename)
{
#ifdef LOG_DATAFILE
    LOG_MSG("DataFile %s (%c) created\n",
            filename.c_str(), (for_write?'W':'R'));
#endif
}
// Try to locate entry with given name. OldDirectoryFileIterator OldDirectoryFile::find(const stdString &name) { OldDirectoryFileIterator i(this); i._hash = HashTable::Hash(name.c_str()); FileOffset offset = readHTEntry(i._hash); while (offset != INVALID_OFFSET) { i.entry.read(_file, offset); if (name == i.entry.data.name) return i; offset = i.entry.data.next_entry_offset; } i.entry.clear(); return i; }
// Try to locate entry with given name. DirectoryFileIterator DirectoryFile::find(const stdString &name) { DirectoryFileIterator i(this); i._hash = HashTable::Hash(name.c_str()); FileOffset offset = readHTEntry(i._hash); while (offset != INVALID_OFFSET) { i.getChannel()->read(_file, offset); if (name == i.getChannel()->getName()) return i; offset = i.getChannel()->getNextEntryOffset(); } i.getChannel()->clear(); return i; }
size_t read_samples(const stdString &index_name, const stdString &channel_name) { IndexFile index(50); index.open(index_name.c_str(), true); size_t samples = 0; AutoPtr<DataReader> reader(new RawDataReader(index)); const RawValue::Data *data = reader->find(channel_name, 0); while (data) { ++samples; data = reader->next(); } reader = 0; DataFile::close_all(); return samples; }
bool old_write_samples(const stdString &index_name, const stdString &channel_name, size_t samples) { OldDirectoryFile index; CtrlInfo info; if (!index.open(index_name, true)) { fprintf(stderr, "Cannot create dir. file '%s'\n", index_name.c_str()); return false; } info.setNumeric (2, "socks", 0.0, 10.0, 0.0, 1.0, 9.0, 10.0); DbrType dbr_type = DBR_TIME_DOUBLE; DbrCount dbr_count = 1; OldDataWriter * writer = new OldDataWriter(index, channel_name, info, dbr_type, dbr_count, 2.0, samples); dbr_time_double *data = RawValue::allocate(dbr_type, dbr_count, 1); data->status = 0; data->severity = 0; size_t i; for (i=0; i<samples; ++i) { data->value = (double) i; RawValue::setTime(data, epicsTime::getCurrent()); if (!writer->add(data)) { fprintf(stderr, "Write error with value %zu/%zu\n", i, samples); break; } } RawValue::free(data); delete writer; DataFile::close_all(); return true; }
// Configure this CtrlInfo as 'Numeric':
// precision, engineering units, display range,
// and alarm/warning limits.
void CtrlInfo::setNumeric(
    int32_t prec, const stdString &units,
    float disp_low, float disp_high,
    float low_alarm, float low_warn, float high_warn, float high_alarm)
{
    size_t len = units.length();
    // Variable-length units text is stored at the end of CtrlInfoData;
    // presumably the struct already reserves room for the '\0'
    // (size only adds 'len') — verify against the CtrlInfoData layout.
    size_t size = sizeof(CtrlInfoData) + len;
    _infobuf.reserve(size);
    CtrlInfoData *info = _infobuf.mem();
    info->type = Numeric;
    info->size = size;
    info->value.analog.disp_high = disp_high;
    info->value.analog.disp_low = disp_low;
    info->value.analog.low_warn = low_warn;
    info->value.analog.low_alarm = low_alarm;
    info->value.analog.high_warn = high_warn;
    info->value.analog.high_alarm = high_alarm;
    info->value.analog.prec = prec;
    // Copy the units including the terminating '\0'
    string2cp (info->value.analog.units, units, len+1);
}
bool write_samples(const stdString &index_name, const stdString &channel_name, size_t samples) { IndexFile index(50); CtrlInfo info; index.open(index_name.c_str(), false); info.setNumeric (2, "socks", 0.0, 10.0, 0.0, 1.0, 9.0, 10.0); DbrType dbr_type = DBR_TIME_DOUBLE; DbrCount dbr_count = 1; DataWriter::file_size_limit = 10*1024*1024; AutoPtr<DataWriter> writer( new DataWriter(index, channel_name, info, dbr_type, dbr_count, 2.0, samples)); RawValueAutoPtr data(RawValue::allocate(dbr_type, dbr_count, 1)); data->status = 0; data->severity = 0; size_t i; for (i=0; i<samples; ++i) { data->value = (double) i; RawValue::setTime(data, epicsTime::getCurrent()); if (!writer->add(data)) { fprintf(stderr, "Write error with value %zu/%zu\n", i, samples); break; } } writer = 0; DataFile::close_all(); return true; }
size_t old_read_samples(const stdString &index_name, const stdString &channel_name) { OldDirectoryFile index; if (!index.open(index_name)) { fprintf(stderr, "Cannot open dir. file '%s'\n", index_name.c_str()); return 0; } size_t samples = 0; OldDataReader *reader = new OldDataReader(index); const RawValue::Data *data = reader->find(channel_name, 0); while (data) { ++samples; data = reader->next(); } delete reader; DataFile::close_all(); return samples; }
// Add a new entry to HT. // Throws Invalid if that entry exists already. // // After calling this routine the current entry // is undefined. It must be initialized and // then written with saveEntry (). DirectoryFileIterator DirectoryFile::add(const stdString &name) { DirectoryFileIterator i(this); BinChannel *channel = i.getChannel(); const char *cname = name.c_str(); i._hash = HashTable::Hash(cname); FileOffset offset = readHTEntry(i._hash); if (offset == INVALID_OFFSET) // Empty HT slot: writeHTEntry(i._hash, _next_free_entry); else { // Follow the entry chain that hashed to this value: FileOffset next = offset; while (next != INVALID_OFFSET) { channel->read(_file, next); if (name == channel->getName()) // already there? return i; next = channel->getNextEntryOffset(); } // i._entry: last entry in chain. // make that one point to new entry: channel->setNextEntryOffset(_next_free_entry); channel->write(_file, channel->getOffset()); } // Last entry points now to _next_free_entry. // Create the new entry there: channel->init(cname); channel->setNextEntryOffset(INVALID_OFFSET); channel->write(_file, _next_free_entry); fflush(_file); _next_free_entry += channel->getDataSize(); return i; }
// Remove name from directory file. // Will not remove data but only "pointers" to the data! bool DirectoryFile::remove(const stdString &name) { DirectoryFileIterator i(this); BinChannel *channel = i.getChannel(); HashTable::HashValue hash = HashTable::Hash(name.c_str()); FileOffset prev=0, offset = readHTEntry(hash); // Follow the channel chain that hashes to this value: while (offset != INVALID_OFFSET) { channel->read(_file, offset); if (name == channel->getName()) { // unlink this entry from list of names that share 'hash' if (prev == 0) // first entry in list? { // Make hash table point to the next channel, // skipping this one writeHTEntry(hash, channel->getNextEntryOffset()); return true; } else { // Make previous entry skip this one offset = channel->getNextEntryOffset(); channel->read(_file, prev); channel->setNextEntryOffset(offset); channel->write(_file, prev); return true; } } prev = offset; offset = channel->getNextEntryOffset(); } return false; }