// class method
rose_addr_t
SRecord::load(const std::vector<SRecord> &srecs, MemoryMap &map, bool createSegments, unsigned accessPerms) {
    if (createSegments) {
        // We want to minimize the number of buffers in the map, so the first step is to discover what addresses are covered by
        // the data S-records
        Sawyer::Container::IntervalSet<AddressInterval> addressesUsed;
        BOOST_FOREACH (const SRecord &srec, srecs) {
            switch (srec.type()) {
                case SREC_DATA16:
                case SREC_DATA24:
                case SREC_DATA32:
                    addressesUsed.insert(AddressInterval::baseSize(srec.address(), srec.data().size()));
                    break;
                default:
                    break;
            }
        }

        // Create buffers for the data and insert them into the memory map
        BOOST_FOREACH (const AddressInterval &interval, addressesUsed.intervals()) {
            ASSERT_forbid(interval.isWhole());          // not practically possible since S-Record file would be >2^65 bytes
            map.insert(interval, MemoryMap::Segment::anonymousInstance(interval.size(), accessPerms, "S-Records"));
        }
    }

    // Populate the map by writing the S-Record data into it.
    rose_addr_t startingAddr = 0;
    BOOST_FOREACH (const SRecord &srec, srecs) {
        switch (srec.type()) {
            case SREC_DATA16:
            case SREC_DATA24:
            case SREC_DATA32: {
                if (!srec.data().empty()) {
                    size_t nwritten = map.at(srec.address()).write(srec.data()).size();
                    if (nwritten != srec.data().size())
                        throw MemoryMap::NotMapped("S-Record destination is not mapped for " +
                                                   StringUtility::plural(srec.data().size(), "bytes"),
                                                   &map, srec.address());
                }
                break;
            }
            case SREC_START16:
            case SREC_START24:
            case SREC_START32:
                startingAddr = srec.address();
                break;
            default:
                break;
        }
    }
    return startingAddr;
}
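// Hypothetical usage sketch (not part of the ROSE sources): once an S-record file has been parsed
// into a vector of SRecord objects elsewhere, load() materializes the data records into a MemoryMap
// and returns the address from the start-address record (or zero if none was seen).  The permission
// value below is a placeholder; use whatever protection bits your ROSE version's MemoryMap defines.
//
//     std::vector<SRecord> srecs = /* parsed elsewhere */;
//     MemoryMap map;                                       // assumes a default-constructible MemoryMap
//     unsigned perms = /* read+execute protection bits */;
//     rose_addr_t startVa = SRecord::load(srecs, map, true /*createSegments*/, perms);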
// class method
NoOperation::IndexIntervals
NoOperation::largestEarliestNonOverlapping(const NoOperation::IndexIntervals &in) {
    NoOperation::IndexIntervals sorted = in, retval;
    std::sort(sorted.begin(), sorted.end(), sortBySizeAddress);
    Sawyer::Container::IntervalSet<IndexInterval> seen;
    BOOST_FOREACH (const NoOperation::IndexInterval &where, sorted) {
        if (!seen.isOverlapping(where)) {
            retval.push_back(where);
            seen.insert(where);
        }
    }
    return retval;
}
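// A standalone sketch (hypothetical, not ROSE code) of the same greedy selection implemented above,
// using plain standard-library types instead of the Sawyer containers.  It assumes sortBySizeAddress
// orders intervals by decreasing size and then by increasing starting index, which is what the method
// name "largestEarliestNonOverlapping" suggests.

#include <algorithm>
#include <cstddef>
#include <utility>
#include <vector>

typedef std::pair<std::size_t, std::size_t> Interval;      // half-open index range [first, second)

static bool
overlaps(const Interval &a, const Interval &b) {
    return a.first < b.second && b.first < a.second;
}

static std::vector<Interval>
greedyLargestEarliest(std::vector<Interval> candidates) {
    // Largest intervals first; break size ties by preferring the earlier starting index.
    std::sort(candidates.begin(), candidates.end(),
              [](const Interval &a, const Interval &b) {
                  std::size_t asize = a.second - a.first, bsize = b.second - b.first;
                  return asize != bsize ? asize > bsize : a.first < b.first;
              });

    // Keep a candidate only if it does not overlap anything already kept.
    std::vector<Interval> kept;
    for (const Interval &where: candidates) {
        bool conflict = false;
        for (const Interval &k: kept)
            conflict = conflict || overlaps(where, k);
        if (!conflict)
            kept.push_back(where);
    }
    return kept;
}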
void
RegisterStateGeneric::assertStorageConditions(const std::string &when, const RegisterDescriptor &reg) const {
#if !defined(NDEBUG) && defined(RegisterStateGeneric_ExtraAssertions)
#if 1 // DEBUGGING [Robb P. Matzke 2015-09-28]
    static volatile size_t ncalls = 0;
    ++ncalls;
#endif
    // Check that every stored (descriptor, value) pair is internally consistent and that no part of a
    // register is stored more than once in its list.  On failure, report the offending register and its
    // related registers, then abort.
    std::ostringstream error;
    BOOST_FOREACH (const Registers::Node &rnode, registers_.nodes()) {
        Sawyer::Container::IntervalSet<BitRange> foundLocations;
        BOOST_FOREACH (const RegPair &regpair, rnode.value()) {
            if (!regpair.desc.is_valid()) {
                error <<"invalid register descriptor";
            } else if (regpair.desc.get_major() != rnode.key().majr || regpair.desc.get_minor() != rnode.key().minr) {
                error <<"register is in wrong list; register=" <<regpair.desc.get_major() <<"." <<regpair.desc.get_minor()
                      <<", list=" <<rnode.key().majr <<"." <<rnode.key().minr;
            } else if (regpair.value == NULL) {
                error <<"value is null for register " <<regpair.desc;
            } else if (regpair.value->get_width() != regpair.desc.get_nbits()) {
                error <<"value width (" <<regpair.value->get_width() <<") is incorrect for register " <<regpair.desc;
            } else if (foundLocations.isOverlapping(regpair.location())) {
                error <<"register " <<regpair.desc <<" is stored multiple times in the list";
            }
            foundLocations.insert(regpair.location());
            if (!error.str().empty())
                break;
        }
        if (!error.str().empty()) {
            mlog[FATAL] <<when <<" register " <<reg <<":\n";
            mlog[FATAL] <<" " <<error.str() <<"\n";
            mlog[FATAL] <<" related registers:\n";
            BOOST_FOREACH (const RegPair &regpair, rnode.value()) {
                mlog[FATAL] <<" " <<regpair.desc;
                if (regpair.value == NULL)
                    mlog[FATAL] <<"\tnull value";
                mlog[FATAL] <<"\n";
            }
            abort();
        }
    }
#endif
}
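// Note (illustrative only): the checks above compile away unless the translation unit is built with
// assertions enabled (NDEBUG not defined) and the extra-assertions macro defined, e.g. something
// along the lines of:
//
//     g++ -UNDEBUG -DRegisterStateGeneric_ExtraAssertions ...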