void bz2_test()
{
    std::string data("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");

    ar::zip::header head;
    head.path = "zip_test.dat";
    head.encrypted = false;
    head.method = ar::zip::method::bzip2;
    head.update_time = std::time(0);
    head.file_size = static_cast<boost::uint32_t>(data.size());
    head.attributes = ar::msdos::attributes::read_only;
    head.permissions = 0123;
    head.comment = "test comment";

    io_ex::tmp_file archive;
    ar::basic_zip_file_sink<
        io_ex::dont_close_device<io_ex::tmp_file>
    > sink(io_ex::dont_close(archive));

    sink.create_entry(head);
    io_ex::blocking_write(sink, &data[0], data.size());
    sink.close();
    sink.close_archive();

    io::seek(archive, 0, BOOST_IOS::beg);

    ar::basic_zip_file_source<io_ex::tmp_file> src(archive);

    BOOST_CHECK(src.next_entry());
    check_header(head, src.header());

    std::string data2;
    io::copy(src, io::back_inserter(data2));
    BOOST_CHECK_EQUAL_COLLECTIONS(
        data.begin(), data.end(), data2.begin(), data2.end()
    );

    BOOST_CHECK(!src.next_entry());
}
void RelativeDateFormat::loadDates(UErrorCode &status) {
    UResourceBundle *rb = ures_open(NULL, fLocale.getBaseName(), &status);
    LocalUResourceBundlePointer dateTimePatterns(
        ures_getByKeyWithFallback(rb,
                                  "calendar/gregorian/DateTimePatterns",
                                  (UResourceBundle*)NULL, &status));
    if (U_SUCCESS(status)) {
        int32_t patternsSize = ures_getSize(dateTimePatterns.getAlias());
        if (patternsSize > kDateTime) {
            int32_t resStrLen = 0;
            int32_t glueIndex = kDateTime;
            if (patternsSize >= (kDateTimeOffset + kShort + 1)) {
                int32_t offsetIncrement = (fDateStyle & ~kRelative); // Remove relative bit.
                if (offsetIncrement >= (int32_t)kFull &&
                    offsetIncrement <= (int32_t)kShortRelative) {
                    glueIndex = kDateTimeOffset + offsetIncrement;
                }
            }

            const UChar *resStr = ures_getStringByIndex(dateTimePatterns.getAlias(), glueIndex, &resStrLen, &status);
            if (U_SUCCESS(status) && resStrLen >= patItem1Len &&
                u_strncmp(resStr, patItem1, patItem1Len) == 0) {
                fCombinedHasDateAtStart = TRUE;
            }
            fCombinedFormat = new SimpleFormatter(UnicodeString(TRUE, resStr, resStrLen), 2, 2, status);
        }
    }

    // Data loading for relative names, e.g., "yesterday", "today", "tomorrow".
    fDatesLen = UDAT_DIRECTION_COUNT; // Maximum defined by data.
    fDates = (URelativeString*) uprv_malloc(sizeof(fDates[0])*fDatesLen);

    RelDateFmtDataSink sink(fDates, fDatesLen);
    ures_getAllItemsWithFallback(rb, "fields/day/relative", sink, status);

    ures_close(rb);

    if (U_FAILURE(status)) {
        fDatesLen = 0;
        return;
    }
}
inline bool
generate(OutputIterator target_sink, Expr const& xpr, Parameter const& param)
{
    typedef spirit::traits::is_component<karma::domain, Expr> is_component;

    // report invalid expression error as early as possible
    BOOST_MPL_ASSERT_MSG(is_component::value,
        xpr_is_not_convertible_to_a_generator,
        (OutputIterator, Expr, Parameter));

    // wrap user supplied iterator into our own output iterator
    detail::output_iterator<OutputIterator> sink(target_sink);

    typedef typename result_of::as_component<karma::domain, Expr>::type component;
    typedef typename component::director director;
    component c = spirit::as_component(karma::domain(), xpr);

    return director::generate(c, sink, unused, unused, param);
}
int main(int argc, char* argv[])
{
    eva::stopwatch watch;

    eva::queue* queue_in  = new eva::queue();
    eva::queue* queue_out = new eva::queue();

    test_source    source( *queue_in );
    test_processor processor( *queue_in, *queue_out );
    test_sink      sink( *queue_out );

    watch.start();

    sink.start();
    processor.start();
    source.start();

    sink.join();
    processor.join();
    source.join();

    std::cout << N << " events in " << watch.elapsed_ms() << " ms. "
              << (int)( N / watch.elapsed_s() ) << " per sec" << std::endl;
}
void UpdateDownloader::Run()
{
    // no initialization to do, so signal readiness immediately
    SignalReady();

    try
    {
        const std::wstring tmpdir = CreateUniqueTempDirectory();
        Settings::WriteConfigValue("UpdateTempDir", tmpdir);

        UpdateDownloadSink sink(*this, tmpdir);
        DownloadFile(m_appcast.DownloadURL, &sink);
        sink.Close();
        UI::NotifyUpdateDownloaded(sink.GetFilePath(), m_appcast);
    }
    catch ( ... )
    {
        UI::NotifyUpdateError();
        throw;
    }
}
//[ example_sinks_ostream
void init_logging()
{
    boost::shared_ptr< logging::core > core = logging::core::get();

    // Create a backend and attach a couple of streams to it
    boost::shared_ptr< sinks::text_ostream_backend > backend =
        boost::make_shared< sinks::text_ostream_backend >();
    backend->add_stream(
        boost::shared_ptr< std::ostream >(&std::clog, boost::empty_deleter()));
    backend->add_stream(
        boost::shared_ptr< std::ostream >(new std::ofstream("sample.log")));

    // Enable auto-flushing after each log record written
    backend->auto_flush(true);

    // Wrap it into the frontend and register in the core.
    // The backend requires synchronization in the frontend.
    typedef sinks::synchronous_sink< sinks::text_ostream_backend > sink_t;
    boost::shared_ptr< sink_t > sink(new sink_t(backend));
    core->add_sink(sink);
}
QVariantMap QgsOrderByExpressionAlgorithm::processAlgorithm( const QVariantMap &parameters, QgsProcessingContext &context, QgsProcessingFeedback *feedback )
{
  std::unique_ptr< QgsProcessingFeatureSource > source( parameterAsSource( parameters, QStringLiteral( "INPUT" ), context ) );
  if ( !source )
    throw QgsProcessingException( invalidSourceError( parameters, QStringLiteral( "INPUT" ) ) );

  QString expressionString = parameterAsExpression( parameters, QStringLiteral( "EXPRESSION" ), context );
  bool ascending = parameterAsBoolean( parameters, QStringLiteral( "ASCENDING" ), context );
  bool nullsFirst = parameterAsBoolean( parameters, QStringLiteral( "NULLS_FIRST" ), context );

  QString sinkId;
  std::unique_ptr< QgsFeatureSink > sink( parameterAsSink( parameters, QStringLiteral( "OUTPUT" ), context, sinkId, source->fields(), source->wkbType(), source->sourceCrs() ) );
  if ( !sink )
    throw QgsProcessingException( invalidSinkError( parameters, QStringLiteral( "OUTPUT" ) ) );

  long count = source->featureCount();
  double step = count > 0 ? 100.0 / count : 1;
  int current = 0;

  QgsFeatureRequest request;
  request.addOrderBy( expressionString, ascending, nullsFirst );

  QgsFeature inFeature;
  QgsFeatureIterator features = source->getFeatures( request, QgsProcessingFeatureSource::FlagSkipGeometryValidityChecks );
  while ( features.nextFeature( inFeature ) )
  {
    if ( feedback->isCanceled() )
    {
      break;
    }
    sink->addFeature( inFeature );
    feedback->setProgress( current * step );
    current++;
  }

  QVariantMap outputs;
  outputs.insert( QStringLiteral( "OUTPUT" ), sinkId );
  return outputs;
}
static SquashStatus
squash_gipfeli_decompress_buffer (SquashCodec* codec,
                                  size_t* decompressed_length,
                                  uint8_t decompressed[SQUASH_ARRAY_PARAM(*decompressed_length)],
                                  size_t compressed_length,
                                  const uint8_t compressed[SQUASH_ARRAY_PARAM(compressed_length)],
                                  SquashOptions* options) {
  util::compression::Compressor* compressor =
    util::compression::NewGipfeliCompressor();
  util::compression::UncheckedByteArraySink sink((char*) decompressed);
  util::compression::ByteArraySource source((const char*) compressed, compressed_length);
  SquashStatus res = SQUASH_OK;

  if (compressor == NULL)
    return squash_error (SQUASH_MEMORY);

  std::string compressed_str((const char*) compressed, compressed_length);
  size_t uncompressed_length;
  if (!compressor->GetUncompressedLength (compressed_str, &uncompressed_length)) {
    res = squash_error (SQUASH_FAILED);
    goto cleanup;
  }

  if (uncompressed_length > *decompressed_length) {
    res = squash_error (SQUASH_BUFFER_FULL);
    goto cleanup;
  } else {
    *decompressed_length = uncompressed_length;
  }

  if (!compressor->UncompressStream (&source, &sink)) {
    res = squash_error (SQUASH_FAILED);
  }

 cleanup:
  delete compressor;

  return res;
}
////////////////////////////////////////////////////////////////////////////
//  Main program
////////////////////////////////////////////////////////////////////////////
int main()
{
    std::cout << "/////////////////////////////////////////////////////////\n\n";
    std::cout << "\tPrinting integers in a matrix using Spirit...\n\n";
    std::cout << "/////////////////////////////////////////////////////////\n\n";

    // here we put the data to generate
    std::vector<std::vector<int> > v;

    // now, generate the size and the contents for the matrix
    std::srand((unsigned int)std::time(NULL));
    std::size_t rows = std::rand() / (RAND_MAX / 10);
    std::size_t columns = std::rand() / (RAND_MAX / 10);

    v.resize(rows);
    for (std::size_t row = 0; row < rows; ++row)
    {
        v[row].resize(columns);
        std::generate(v[row].begin(), v[row].end(), std::rand);
    }

    // ok, we got the matrix, now print it out
    std::string generated;
    std::back_insert_iterator<std::string> sink(generated);
    if (!client::generate_matrix(sink, v))
    {
        std::cout << "-------------------------\n";
        std::cout << "Generating failed\n";
        std::cout << "-------------------------\n";
    }
    else
    {
        std::cout << "-------------------------\n";
        std::cout << "Generated:\n" << generated << "\n";
        std::cout << "-------------------------\n";
    }
    return 0;
}
int main5 (int argc, char *argv[])
{
    zmq::context_t context (1);

    // Socket to send messages on
    zmq::socket_t sender(context, ZMQ_PUSH);
    sender.bind("tcp://*:5557");

    std::cout << "Press Enter when the workers are ready: " << std::endl;
    getchar ();
    std::cout << "Sending tasks to workers...\n" << std::endl;

    // The first message is "0" and signals start of batch
    zmq::socket_t sink(context, ZMQ_PUSH);
    sink.connect("tcp://localhost:5558");
    zmq::message_t message(2);
    memcpy(message.data(), "0", 1);
    sink.send(message);

    // Initialize random number generator
    srand ((unsigned) time (NULL));

    // Send 100 tasks
    int task_nbr;
    int total_msec = 0;     // Total expected cost in msecs
    for (task_nbr = 0; task_nbr < 100; task_nbr++) {
        int workload;
        // Random workload from 1 to 100 msecs
        workload = within (100) + 1;
        total_msec += workload;

        message.rebuild(10);
        sprintf ((char *) message.data(), "%d", workload);
        sender.send(message);
    }
    std::cout << "Total expected cost: " << total_msec << " msec" << std::endl;
    s_sleep (1);            // Give 0MQ time to deliver

    return 0;
}
int main () {
    zmq::context_t context(1);

    // First allow 0MQ to set the identity
    zmq::socket_t sink(context, ZMQ_XREP);
    sink.bind( "inproc://example");

    zmq::socket_t anonymous(context, ZMQ_REQ);
    anonymous.connect( "inproc://example");

    s_send (anonymous, "XREP uses a generated UUID");
    s_dump (sink);

    // Then set the identity ourselves
    zmq::socket_t identified (context, ZMQ_REQ);
    identified.setsockopt( ZMQ_IDENTITY, "Hello", 5);
    identified.connect( "inproc://example");

    s_send (identified, "XREP socket uses REQ's socket identity");
    s_dump (sink);

    return 0;
}
string GdbResponseWriter::Write(vector<GdbResponseType> const& response) const
{
    namespace karma = boost::spirit::karma;
    typedef std::back_insert_iterator<std::string> sink_type;

    // For debugging
    //GdbResponsePrinter printer;
    //for (auto element : response)
    //    visitAll(printer, element);

    std::string result;
    sink_type sink(result);

    std::unique_ptr<
        boost::spirit::karma::grammar<std::back_insert_iterator<std::string>,
                                      vector<GdbResponseType>()>
    > g(response_write_grammar());

    if (!karma::generate(sink, *g, response))
        raiseError(GeneratorException("Error writing user response."));

    return result;
}
void CLog::add_text_file_sink_unorder()
{
    boost::shared_ptr< sinks::text_file_backend > file_backend =
        boost::make_shared< sinks::text_file_backend >(
            keywords::file_name = "log_%Y-%m-%d_%H-%M-%S_%N.txt",
            keywords::rotation_size = setting_.max_file_size_,
            // rotate at a time point (when the day changes)
            keywords::time_based_rotation = sinks::file::rotation_at_time_point(0, 0, 0),
            // rotate at an interval (every minute)
            // keywords::time_based_rotation = sinks::file::rotation_at_time_interval(boost::posix_time::minutes(1)),
            keywords::format = "[%TimeStamp%] (%Severity%) : %Message%",
            keywords::min_free_space = setting_.max_storage_size_ + setting_.max_file_size_
        );

    file_backend->auto_flush(true);

    typedef sinks::asynchronous_sink< sinks::text_file_backend > sink_t;
    boost::shared_ptr< sink_t > sink(new sink_t(file_backend));

    sink->set_formatter(
        expr::format("[%1%] [%2%] %3% %4%")
            % expr::attr< unsigned int >("RecordID")
            % expr::format_date_time< boost::posix_time::ptime >("TimeStamp", "%Y-%m-%d %H:%M:%S.%f")
            % logging::trivial::severity
            % expr::message
    );

    sink->locked_backend()->set_file_collector(
        sinks::file::make_collector(
            keywords::target = "logs",
            keywords::max_size = setting_.max_storage_size_
        )
    );

    logging::core::get()->add_sink(sink);
}
QVariantMap QgsExtractByLocationAlgorithm::processAlgorithm( const QVariantMap &parameters, QgsProcessingContext &context, QgsProcessingFeedback *feedback )
{
  std::unique_ptr< QgsFeatureSource > input( parameterAsSource( parameters, QStringLiteral( "INPUT" ), context ) );
  std::unique_ptr< QgsFeatureSource > intersectSource( parameterAsSource( parameters, QStringLiteral( "INTERSECT" ), context ) );
  const QList< int > selectedPredicates = parameterAsEnums( parameters, QStringLiteral( "PREDICATE" ), context );

  QString dest;
  std::unique_ptr< QgsFeatureSink > sink( parameterAsSink( parameters, QStringLiteral( "OUTPUT" ), context, dest, input->fields(), input->wkbType(), input->sourceCrs() ) );

  if ( !sink )
    return QVariantMap();

  auto addToSink = [&]( const QgsFeature & feature )
  {
    QgsFeature f = feature;
    sink->addFeature( f, QgsFeatureSink::FastInsert );
  };
  process( input.get(), intersectSource.get(), selectedPredicates, addToSink, false, feedback );

  QVariantMap results;
  results.insert( QStringLiteral( "OUTPUT" ), dest );
  return results;
}
void test_dilution_scheme(int const num_slice,
                          int const num_block,
                          DilutionType const type) {
  auto const name = dilution_names.at(type);
  auto const block_size = num_slice / num_block;

  std::cout << "T = " << num_slice << ", T" << name << num_block
            << " (Morningstar), T" << name << block_size << " (Other):\n\n";

  DilutionScheme dilution_scheme(num_slice, block_size, type);
  for (int b = 0; b < dilution_scheme.size(); ++b) {
    auto const blocks = dilution_scheme[b];
    std::cout << std::setw(2) << blocks.source() << " => "
              << std::setw(2) << blocks.sink() << "\n";

    for (auto const slices : blocks) {
      std::cout << "  " << std::setw(2) << slices.source() << " -> "
                << std::setw(2) << slices.sink() << "\n";
    }
  }
  std::cout << "\n\n";
}
void task1p5(std::ifstream & inputfile, std::ofstream & outputfile)
{
    std::string line;
    getline (inputfile, line);
    int linesize = line.size();
    std::vector<bool*> map;
    getBoolRow(map, line, linesize);
    getBoolMap(inputfile, map, linesize);
    int count = 0;
    int mapsize = map.size();
    for (int i = 0; i < mapsize; i++)
    {
        for (int j = 0; j < linesize; j++)
        {
            if (sink(map, i, j, mapsize, linesize))
            {
                count++;
            }
        }
    }
    std::cout << count;
}
void UpdateDownloader::Run()
{
    // no initialization to do, so signal readiness immediately
    SignalReady();

    try
    {
        const std::wstring tmpdir = CreateUniqueTempDirectory();
        Settings::WriteConfigValue("UpdateTempDir", tmpdir);

        UpdateDownloadSink sink(*this, tmpdir);
        DownloadFile(m_appcast.DownloadURL, &sink, this);
        sink.Close();

        if (Settings::HasDSAPubKeyPem())
        {
            SignatureVerifier::VerifyDSASHA1SignatureValid(sink.GetFilePath(), m_appcast.DsaSignature);
        }
        else
        {
            // backward compatibility - accept as is, but complain about it
            LogError("Using unsigned updates!");
        }

        UI::NotifyUpdateDownloaded(sink.GetFilePath(), m_appcast);
    }
    catch (BadSignatureException&)
    {
        CleanLeftovers(); // remove potentially corrupted file
        UI::NotifyUpdateError(Err_BadSignature);
        throw;
    }
    catch ( ... )
    {
        UI::NotifyUpdateError();
        throw;
    }
}
void iso9660_dir_test()
{
    ar::iso::header head;
    head.path = std::string(206u, 'A');
    head.flags = ar::iso::file_flags::directory;
    head.recorded_time.year = 1970u-1900u;
    head.recorded_time.month = 1u;
    head.recorded_time.day = 1u;
    head.recorded_time.hour = 0u;
    head.recorded_time.minute = 0u;
    head.recorded_time.second = 0u;
    head.recorded_time.timezone = 0;

    io_ex::tmp_file archive;
    ar::basic_iso_file_sink<
        io_ex::dont_close_device<io_ex::tmp_file>
    > sink(io_ex::dont_close(archive));

    ar::iso::volume_desc desc;
    sink.add_volume_desc(desc);
    desc.set_enhanced();
    sink.add_volume_desc(desc);

    sink.create_entry(head);
    sink.close();
    sink.close_archive();

    io::seek(archive, 0, BOOST_IOS::beg);

    ar::basic_iso_file_source<io_ex::tmp_file> src(archive);
    src.select_volume_desc(1u);

    BOOST_REQUIRE(src.next_entry());
    ::check_header(head, src.header());

    BOOST_CHECK(!src.next_entry());
}
void h0_test()
{
    std::string data("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");

    ar::lha::header head;
    head.level = 0;
    head.update_time = std::time(0);
    head.attributes = ar::msdos::attributes::read_only;
    head.path = "h0_test.dat";
    head.os = '?';

    io_ex::tmp_file archive;
    ar::basic_lzh_file_sink<
        io_ex::dont_close_device<io_ex::tmp_file>
    > sink(io_ex::dont_close(archive));

    sink.create_entry(head);
    io_ex::blocking_write(sink, &data[0], data.size());
    sink.close();
    sink.close_archive();

    io::seek(archive, 0, BOOST_IOS::beg);

    ar::basic_lzh_file_source<io_ex::tmp_file> src(archive);

    BOOST_CHECK(src.next_entry());
    check_header(head, src.header());

    std::string data2;
    io::copy(src, io::back_inserter(data2));
    BOOST_CHECK_EQUAL_COLLECTIONS(
        data.begin(), data.end(), data2.begin(), data2.end()
    );

    BOOST_CHECK(!src.next_entry());
}
void init_logging()
{
    // Create a text file sink
    boost::shared_ptr< file_sink > sink(new file_sink(
        keywords::file_name = "%Y%m%d_%H%M%S_%5N.xml",  /*< the resulting file name pattern >*/
        keywords::rotation_size = 16384                 /*< rotation size, in characters >*/
    ));

    sink->set_formatter
    (
        expr::format("\t<record id=\"%1%\" timestamp=\"%2%\">%3%</record>")
            % expr::attr< unsigned int >("RecordID")
            % expr::attr< boost::posix_time::ptime >("TimeStamp")
            % expr::xml_decor[ expr::stream << expr::smessage ] /*< the log message has to be decorated, if it contains special characters >*/
    );

    // Set header and footer writing functors
    sink->locked_backend()->set_open_handler(&write_header);
    sink->locked_backend()->set_close_handler(&write_footer);

    // Add the sink to the core
    logging::core::get()->add_sink(sink);
}
std::string color::to_string() const
{
    namespace karma = boost::spirit::karma;
    boost::spirit::karma::_1_type _1;
    boost::spirit::karma::eps_type eps;
    boost::spirit::karma::double_type double_;
    boost::spirit::karma::string_type kstring;
    boost::spirit::karma::uint_generator<uint8_t,10> color_generator;
    std::string str;
    std::back_insert_iterator<std::string> sink(str);
    karma::generate(sink,
                    // begin grammar
                    kstring[ boost::phoenix::if_(alpha()==255) [_1="rgb("].else_[_1="rgba("]]
                    << color_generator[_1 = red()] << ','
                    << color_generator[_1 = green()] << ','
                    << color_generator[_1 = blue()]
                    << kstring[ boost::phoenix::if_(alpha()==255) [_1 = ')'].else_[_1 = ',']]
                    << eps(alpha()<255) << double_ [_1 = alpha()/255.0] << ')'
                    // end grammar
                    );
    return str;
}
std::vector<std::wstring> L10n::GetSupportedLocaleDisplayNames() const
{
    std::vector<std::wstring> supportedLocaleDisplayNames;
    for (Locale* const& locale : availableLocales)
    {
        if (strcmp(locale->getBaseName(), "long") == 0)
        {
            if (InDevelopmentCopy())
                supportedLocaleDisplayNames.push_back(wstring_from_utf8(Translate("Long strings")));
            continue;
        }

        UnicodeString utf16LocaleDisplayName;
        locale->getDisplayName(*locale, utf16LocaleDisplayName);
        char localeDisplayName[512];
        CheckedArrayByteSink sink(localeDisplayName, ARRAY_SIZE(localeDisplayName));
        utf16LocaleDisplayName.toUTF8(sink);
        ENSURE(!sink.Overflowed());

        supportedLocaleDisplayNames.push_back(wstring_from_utf8(std::string(localeDisplayName, sink.NumberOfBytesWritten())));
    }
    return supportedLocaleDisplayNames;
}
void MaxHeap::sink(int i)
{
    if (i < 0 || i >= MAX_HEAP_LENGTH)
    {
        return;
    }
    int left_child = i*2, right_child = i*2+1;
    if (left_child <= heap_size_ && right_child <= heap_size_)
    {
        // pick the child with the larger key
        int max = (dri_heap_[left_child]->getFeatureScore() < dri_heap_[right_child]->getFeatureScore())
                      ? right_child : left_child;
        if (dri_heap_[i]->getFeatureScore() < dri_heap_[max]->getFeatureScore())
        {
            swap(dri_heap_, i, max);
            sink(max);
        }
    }
    else if (left_child <= heap_size_ && right_child > heap_size_) // only the left child exists
    {
        if (dri_heap_[i]->getFeatureScore() < dri_heap_[left_child]->getFeatureScore())
            swap(dri_heap_, i, left_child);
        return;
    }
}
int main(int argc, char** argv)
{
  ATK::InSndFileFilter<std::int16_t> generator("stereofile.wav");
  assert(generator.get_nb_output_ports() == 2);
  int sampling_rate = generator.get_output_sampling_rate();

  ATK::MiddleSideFilter<float> msfilter;
  msfilter.set_input_sampling_rate(sampling_rate);
  msfilter.set_input_port(0, &generator, 0);
  msfilter.set_input_port(1, &generator, 1);

  ATK::VolumeFilter<float> volumefilter;
  volumefilter.set_input_sampling_rate(sampling_rate);
  volumefilter.set_input_port(0, &msfilter, 1);
  volumefilter.set_volume(.5);

  ATK::MiddleSideFilter<float> msmergefilter;
  msmergefilter.set_input_sampling_rate(sampling_rate);
  msmergefilter.set_input_port(0, &msfilter, 0);
  msmergefilter.set_input_port(1, &volumefilter, 0);

  ATK::VolumeFilter<float> volumefilter2(2);
  volumefilter2.set_input_sampling_rate(sampling_rate);
  volumefilter2.set_input_port(0, &msmergefilter, 0);
  volumefilter2.set_input_port(1, &msmergefilter, 1);
  volumefilter2.set_volume(.5); // Account for MS gain

  ATK::OutSndFileFilter<std::int16_t> sink("stereofile2.wav", 2);
  sink.set_input_sampling_rate(sampling_rate);
  sink.set_input_port(0, &volumefilter2, 0);
  sink.set_input_port(1, &volumefilter2, 1);

  sink.process(generator.get_frames());

  return 0;
}
//[ example_sinks_xml_file_final
void init_logging()
{
    // Create a text file sink
    boost::shared_ptr< file_sink > sink(new file_sink(
        keywords::file_name = "%Y%m%d_%H%M%S_%5N.xml",
        keywords::rotation_size = 16384
    ));

    // Set up where the rotated files will be stored
    init_file_collecting(sink);

    // Upon restart, scan the directory for files matching the file_name pattern
    sink->locked_backend()->scan_for_files();

    sink->set_formatter
    (
        expr::format("\t<record id=\"%1%\" timestamp=\"%2%\">%3%</record>")
            % expr::attr< unsigned int >("RecordID")
            % expr::attr< boost::posix_time::ptime >("TimeStamp")
            % expr::xml_decor[ expr::stream << expr::smessage ]
    );

    // Set header and footer writing functors
    namespace bll = boost::lambda;
    sink->locked_backend()->set_open_handler
    (
        bll::_1 << "<?xml version=\"1.0\"?>\n<log>\n"
    );
    sink->locked_backend()->set_close_handler
    (
        bll::_1 << "</log>\n"
    );

    // Add the sink to the core
    logging::core::get()->add_sink(sink);
}
int main()
{
    int t, i, j, k;
    int x[M][M], y[M][M];
    int x3[M][M][M];

    for (i = 1; i <= M; i++) {
        for (j = 1; j <= M; j++) {
            for (k = 1; k <= 3; k++) {
                src(&(x3[i][j][k]));
            }
        }
    }

    for (i = 1; i <= M; i++) {
        for (j = 1; j <= M; j++) {
            for (k = 1; k <= 3; k++) {
                if (k <= 2) {
                    F1(&(x3[i][j][k]), &(x3[i][j][k]));
                } else {
                    F2(&(x3[i][j][k]), &(x3[i][j][k]));
                }
            }
        }
    }

    for (i = 1; i <= M; i++) {
        for (j = 1; j <= M; j++) {
            for (k = 1; k <= 3; k++) {
                sink(&(x3[i][j][k]));
            }
        }
    }

    return 0;
}
QVariantMap QgsIntersectionAlgorithm::processAlgorithm( const QVariantMap &parameters, QgsProcessingContext &context, QgsProcessingFeedback *feedback )
{
  std::unique_ptr< QgsFeatureSource > sourceA( parameterAsSource( parameters, QStringLiteral( "INPUT" ), context ) );
  if ( !sourceA )
    throw QgsProcessingException( invalidSourceError( parameters, QStringLiteral( "INPUT" ) ) );

  std::unique_ptr< QgsFeatureSource > sourceB( parameterAsSource( parameters, QStringLiteral( "OVERLAY" ), context ) );
  if ( !sourceB )
    throw QgsProcessingException( invalidSourceError( parameters, QStringLiteral( "OVERLAY" ) ) );

  QgsWkbTypes::Type geomType = QgsWkbTypes::multiType( sourceA->wkbType() );

  const QStringList fieldsA = parameterAsFields( parameters, QStringLiteral( "INPUT_FIELDS" ), context );
  const QStringList fieldsB = parameterAsFields( parameters, QStringLiteral( "OVERLAY_FIELDS" ), context );

  QList<int> fieldIndicesA = QgsProcessingUtils::fieldNamesToIndices( fieldsA, sourceA->fields() );
  QList<int> fieldIndicesB = QgsProcessingUtils::fieldNamesToIndices( fieldsB, sourceB->fields() );

  QgsFields outputFields = QgsProcessingUtils::combineFields(
                             QgsProcessingUtils::indicesToFields( fieldIndicesA, sourceA->fields() ),
                             QgsProcessingUtils::indicesToFields( fieldIndicesB, sourceB->fields() ) );

  QString dest;
  std::unique_ptr< QgsFeatureSink > sink( parameterAsSink( parameters, QStringLiteral( "OUTPUT" ), context, dest, outputFields, geomType, sourceA->sourceCrs() ) );
  if ( !sink )
    throw QgsProcessingException( invalidSinkError( parameters, QStringLiteral( "OUTPUT" ) ) );

  QVariantMap outputs;
  outputs.insert( QStringLiteral( "OUTPUT" ), dest );

  int count = 0;
  int total = sourceA->featureCount();

  QgsOverlayUtils::intersection( *sourceA.get(), *sourceB.get(), *sink.get(), context, feedback, count, total, fieldIndicesA, fieldIndicesB );

  return outputs;
}
std::vector<MultiIndex> _extend(const std::vector<MultiIndex>& source, dim_t start, dim_t len)
{
    assert (len != 0);

    if (len == 1) {
        std::vector<MultiIndex> next(source);
        for (std::size_t i = 0; i < source.size(); i++) {
            next[i][start] += 1;
        }
        return next;
    }
    else {
        // use divide and conquer approach
        std::vector<MultiIndex> lhs = _extend(source, start, len/2);
        std::vector<MultiIndex> rhs = _extend(source, start + len/2, len - len/2);

        std::vector<MultiIndex> sink(lhs.size() + rhs.size());
        auto seek = strict_union(lhs.begin(), lhs.end(),
                                 rhs.begin(), rhs.end(),
                                 sink.begin(), std::less<MultiIndex>{});
        sink.resize(seek - sink.begin());
        return sink;
    }
}
void saveModel(std::string modelname,
               std::map<std::string, size_t>& word2id,
               Eigen::MatrixXf& inner,
               Eigen::MatrixXf& outer) {
    std::ofstream sink(modelname.c_str());
    if (!sink.good()) {
        std::cerr << "Error opening file " << modelname << std::endl;
        std::exit(-1);
    }
    // vocabulary: size first, then one "word <tab> id" pair per line
    sink << word2id.size() << std::endl;
    for (std::pair<const std::string, size_t>& kv : word2id) {
        sink << kv.first << "\t" << kv.second << std::endl;
    }
    // embedding matrices, one row per line
    sink << "inner vector" << std::endl;
    for (size_t i = 0; i < inner.rows(); ++i) {
        sink << inner.row(i) << std::endl;
    }
    sink << "outer vector" << std::endl;
    for (size_t i = 0; i < outer.rows(); ++i) {
        sink << outer.row(i) << std::endl;
    }
}
boost::shared_ptr< sink_t > init_logging()
{
    boost::shared_ptr< logging::core > core = logging::core::get();

    // Create a backend and initialize it with a stream
    boost::shared_ptr< sinks::text_ostream_backend > backend =
        boost::make_shared< sinks::text_ostream_backend >();
    backend->add_stream(
        boost::shared_ptr< std::ostream >(&std::clog, boost::empty_deleter()));

    // Wrap it into the frontend and register in the core
    boost::shared_ptr< sink_t > sink(new sink_t(
        backend,                                                   /*< pointer to the pre-initialized backend >*/
        keywords::order =
            logging::make_attr_ordering("LineID", std::less< unsigned int >()), /*< log record ordering predicate >*/
        keywords::ordering_window = boost::posix_time::seconds(1)  /*< latency of log record processing >*/
    ));
    core->add_sink(sink);

    // You can manage filtering and formatting through the sink interface
    sink->set_filter(expr::attr< severity_level >("Severity") >= warning);
    sink->set_formatter
    (
        expr::stream
            << "Level: " << expr::attr< severity_level >("Severity")
            << " Message: " << expr::smessage
    );

    // You can also manage backend in a thread-safe manner
    {
        sink_t::locked_backend_ptr p = sink->locked_backend();
        p->add_stream(boost::make_shared< std::ofstream >("sample.log"));
    } // the backend gets released here

    return sink;
}