Example #1
	safe_ptr<core::basic_frame> get_frame(int hints)
	{
		if(exception_ != nullptr)
			std::rethrow_exception(exception_);

		hints_ = hints;

		safe_ptr<core::basic_frame> frame = core::basic_frame::late();
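		// Non-blocking pop: if no frame is ready yet, keep the late() placeholder and tag the graph.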
		if(!frame_buffer_.try_pop(frame))
			graph_->set_tag("late-frame");
		graph_->set_value("output-buffer", static_cast<float>(frame_buffer_.size())/static_cast<float>(frame_buffer_.capacity()));	
		return frame;
	}
Example #2
 draw_frame last_frame() override
 {
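     // If no frame has been cached yet, try to grab the latest one from the buffer without blocking.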
     if (!frame_) {
         buffer_.try_pop(frame_);
     }
     return core::draw_frame::still(frame_);
 }
Example #3
	channel_consumer()
		: consumer_index_(next_consumer_index())
		, first_frame_available_(first_frame_promise_.get_future())
		, first_frame_reported_(false)
	{
		is_running_ = true;
		current_age_ = 0;
		frame_buffer_.set_capacity(3);
	}
Example #4
    void trigger(DataObject<D>& d) {
        boost::shared_lock_guard<boost::shared_mutex> lock(d._mtx_links);

        if (d._links.empty())
            return;

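        // Queue every linked DO for execution; push() on the bounded queue blocks only if a capacity is set and reached.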
        for (auto &p : d._links)
            _tbbExecutionQueue.push(p.second);
    }
Example #5
	std::shared_ptr<read_frame> receive()
	{
		if(!is_running_)
			return make_safe<read_frame>();
		std::shared_ptr<read_frame> frame;
		
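		// Non-blocking pop: if a frame is available, record its age for diagnostics.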
		if (frame_buffer_.try_pop(frame))
			current_age_ = frame->get_age_millis();

		return frame;
	}
Example #6
    route_producer(std::shared_ptr<route> route, int buffer)
        : route_(route)
        , connection_(route_->signal.connect([this](const core::draw_frame& frame) {
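            // try_push never blocks; if the route buffer is full the frame is dropped and reported.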
            if (!buffer_.try_push(frame)) {
                graph_->set_tag(diagnostics::tag_severity::WARNING, "dropped-frame");
            }
            graph_->set_value("produce-time", produce_timer_.elapsed() * route_->format_desc.fps * 0.5);
            produce_timer_.restart();
        }))
    {
        buffer_.set_capacity(buffer > 0 ? buffer : route->format_desc.field_count);

        graph_->set_color("late-frame", diagnostics::color(0.6f, 0.3f, 0.3f));
        graph_->set_color("produce-time", caspar::diagnostics::color(0.0f, 1.0f, 0.0f));
        graph_->set_color("consume-time", caspar::diagnostics::color(1.0f, 0.4f, 0.0f, 0.8f));
        graph_->set_color("dropped-frame", diagnostics::color(0.3f, 0.6f, 0.3f));
        graph_->set_text(print());
        diagnostics::register_graph(graph_);

        CASPAR_LOG(debug) << print() << L" Initialized";
    }
Example #7
	virtual boost::unique_future<bool> send(const safe_ptr<read_frame>& frame) override
	{
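		// Non-blocking push: when the buffer is full the frame is simply not queued.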
		bool pushed = frame_buffer_.try_push(frame);

		if (pushed && !first_frame_reported_)
		{
			first_frame_promise_.set_value();
			first_frame_reported_ = true;
		}

		return caspar::wrap_as_future(is_running_.load());
	}
Example #8
	virtual void send(const safe_ptr<basic_frame>& src_frame) override
	{
		bool pushed = frame_buffer_.try_push(src_frame);
//		frame_buffer_.push(src_frame);

		if (pushed && !first_frame_reported_) //changed to fix compilation
		//if (!first_frame_reported_)
		{
			first_frame_promise_.set_value();
			first_frame_reported_ = true;
		}
	}
Example #9
    draw_frame receive_impl(int nb_samples) override
    {
        core::draw_frame frame;
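        // Non-blocking pop: tag a late frame if nothing has arrived in time, otherwise cache it for last_frame().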
        if (!buffer_.try_pop(frame)) {
            graph_->set_tag(diagnostics::tag_severity::WARNING, "late-frame");
        } else {
            frame_ = frame;
        }

        graph_->set_value("consume-time", consume_timer_.elapsed() * route_->format_desc.fps * 0.5);
        consume_timer_.restart();
        return frame;
    }
Example #10
	safe_ptr<basic_frame> receive()
	{
//		comment out to avoid compiler err
		safe_ptr<basic_frame> frame;
//		bool ispoped = frame_buffer_.try_pop(frame);
//		if (frame_buffer_.size() > 0) {
			//frame_buffer_.pop();
//			return basic_frame::late();
//		}
		if (!frame_buffer_.try_pop(frame))
		{
			return basic_frame::late();
		}
		return frame;
	}
Example #11
	oal_consumer() 
		: container_(16)
		, channel_index_(-1)
		, started_(false)
		, channel_layout_(
				core::default_channel_layout_repository().get_by_name(
						L"STEREO"))
	{
		graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f));	
		graph_->set_color("dropped-frame", diagnostics::color(0.3f, 0.6f, 0.3f));
		diagnostics::register_graph(graph_);

		is_running_ = true;
		presentation_age_ = 0;
		input_.set_capacity(2);
	}
Example #12
	decklink_producer(const core::video_format_desc& format_desc, size_t device_index, const safe_ptr<core::frame_factory>& frame_factory, const std::wstring& filter)
		: decklink_(get_device(device_index))
		, input_(decklink_)
		, attributes_(decklink_)
		, model_name_(get_model_name(decklink_))
		, device_index_(device_index)
		, filter_(filter)
		, format_desc_(format_desc)
		, audio_cadence_(format_desc.audio_cadence)
		, muxer_(format_desc.fps, frame_factory, filter)
		, sync_buffer_(format_desc.audio_cadence.size())
		, frame_factory_(frame_factory)
	{		
		hints_ = 0;
		frame_buffer_.set_capacity(2);
		
		graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f));	
		graph_->set_color("late-frame", diagnostics::color(0.6f, 0.3f, 0.3f));
		graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));
		graph_->set_color("dropped-frame", diagnostics::color(0.3f, 0.6f, 0.3f));
		graph_->set_color("output-buffer", diagnostics::color(0.0f, 1.0f, 0.0f));
		graph_->set_text(print());
		diagnostics::register_graph(graph_);
		
		auto display_mode = get_display_mode(input_, format_desc_.format, bmdFormat8BitYUV, bmdVideoInputFlagDefault);
				
		// NOTE: bmdFormat8BitARGB is currently not supported by any decklink card. (2011-05-08)
		if(FAILED(input_->EnableVideoInput(display_mode, bmdFormat8BitYUV, bmdVideoInputFlagDefault))) 
			BOOST_THROW_EXCEPTION(caspar_exception() 
									<< msg_info(narrow(print()) + " Could not enable video input.")
									<< boost::errinfo_api_function("EnableVideoInput"));

		if(FAILED(input_->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, format_desc_.audio_channels))) 
			BOOST_THROW_EXCEPTION(caspar_exception() 
									<< msg_info(narrow(print()) + " Could not enable audio input.")
									<< boost::errinfo_api_function("EnableAudioInput"));
			
		if (FAILED(input_->SetCallback(this)))
			BOOST_THROW_EXCEPTION(caspar_exception() 
									<< msg_info(narrow(print()) + " Failed to set input callback.")
									<< boost::errinfo_api_function("SetCallback"));
			
		if(FAILED(input_->StartStreams()))
			BOOST_THROW_EXCEPTION(caspar_exception() 
									<< msg_info(narrow(print()) + " Failed to start input stream.")
									<< boost::errinfo_api_function("StartStreams"));
	}
Example #13
	virtual boost::unique_future<bool> send(const safe_ptr<core::read_frame>& frame) override
	{
		auto buffer = std::make_shared<audio_buffer_16>(
			core::audio_32_to_16(core::get_rearranged_and_mixed(frame->multichannel_view(), channel_layout_, channel_layout_.num_channels)));

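		// Non-blocking push: if the audio input buffer is full the frame is dropped and reported.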
		if (!input_.try_push(std::make_pair(frame, buffer)))
			graph_->set_tag("dropped-frame");

		if (Status() != Playing && !started_)
		{
			sf::SoundStream::Initialize(2, format_desc_.audio_sample_rate);
			Play();
			started_ = true;
		}

		return wrap_as_future(is_running_.load());
	}
Example #14
    // Call all DOs that are linked to the DOs which have been triggered.
    // This method is typically private and is called within a thread tied to a priority;
    // that thread is typically waiting on a synchronization element.
    void run(unsigned inst) {
        std::function<void()> f;

        try {

#ifdef __linux__
            Logger::pLOG->info("DOR-THRD-{} has TID-{}", inst, syscall(SYS_gettid));
#endif

            while (_run_state) {
                _tbbExecutionQueue.pop(f); // Pop of concurrent_bounded_queue waits if queue empty
                f();
            }
        } catch(const tbb::user_abort& abortException) {
            Logger::pLOG->info("Abort DOR-THRD-{}", inst);
            _run_state = false;
        }
        Logger::pLOG->info("DOR-THRD-{} has stopped", inst);
    }
Example #15
	virtual bool OnGetData(sf::SoundStream::Chunk& data) override
	{		
		win32_exception::ensure_handler_installed_for_thread(
				"sfml-audio-thread");
		std::pair<std::shared_ptr<core::read_frame>, std::shared_ptr<audio_buffer_16>> audio_data;

		input_.pop(audio_data); // Block until available

		graph_->set_value("tick-time", perf_timer_.elapsed()*format_desc_.fps*0.5);		
		perf_timer_.restart();

		container_.push_back(std::move(*audio_data.second));
		data.Samples = container_.back().data();
		data.NbSamples = container_.back().size();	
		

		if (audio_data.first)
			presentation_age_ = audio_data.first->get_age_millis();

		return is_running_;
	}
Example #16
 ~DataObjectReactor() {
     Logger::pLOG->info("Delete DOR");
     _run_state = false;
     _tbbExecutionQueue.abort(); // Stops waiting of pop() in run()
 }
Example #17
bool computeBiasFeaturesHelper(ParserT& parser,
                               tbb::concurrent_bounded_queue<TranscriptFeatures>& featQueue,
                               size_t& numComplete, size_t numThreads) {

    using stream_manager = jellyfish::stream_manager<std::vector<std::string>::iterator>;
    using sequence_parser = jellyfish::whole_sequence_parser<stream_manager>;

    size_t merLen = 2;
    Kmer lshift(2 * (merLen - 1));
    Kmer masq((1UL << (2 * merLen)) - 1);
    std::atomic<size_t> readNum{0};

    size_t numActors = numThreads;
    std::vector<std::thread> threads;
    auto tstart = std::chrono::steady_clock::now();

    for (auto i : boost::irange(size_t{0}, numActors)) {
        threads.push_back(std::thread(
	        [&featQueue, &numComplete, &parser, &readNum, &tstart, lshift, masq, merLen, numActors]() -> void {

                size_t cmlen, numKmers;
                jellyfish::mer_dna_ns::mer_base_dynamic<uint64_t> kmer(merLen);

                // while there are transcripts left to process
                while (true) { //producer.nextRead(s)) {
                    sequence_parser::job j(parser);
                    // If this job is empty, then we're done
                    if (j.is_empty()) { return; }

                    for (size_t i=0; i < j->nb_filled; ++i) {
                        ++readNum;
                        if (readNum % 100 == 0) {
                            auto tend = std::chrono::steady_clock::now();
                            auto sec = std::chrono::duration_cast<std::chrono::seconds>(tend-tstart);
                            auto nsec = sec.count();
                            auto rate = (nsec > 0) ? readNum / sec.count() : 0;
                            std::cerr << "processed " << readNum << " transcripts (" << rate << ") transcripts/s\r\r";
                        }

                        // we iterate over the entire read
                        const char* start     = j->data[i].seq.c_str();
                        uint32_t readLen      = j->data[i].seq.size();
                        const char* const end = start + readLen;

                        TranscriptFeatures tfeat{};

                        // reset all of the counts
                        numKmers = 0;
                        cmlen = 0;
                        kmer.polyA();

                        // the maximum number of kmers we'd have to store
                        uint32_t maxNumKmers = (readLen >= merLen) ? readLen - merLen + 1 : 0;
                        if (maxNumKmers == 0) { featQueue.push(tfeat); continue; }

                        // The transcript name
                        std::string fullHeader(j->data[i].header);
                        tfeat.name = fullHeader.substr(0, fullHeader.find(' '));
                        tfeat.length = readLen;
                        auto nfact = 1.0 / readLen;

                        // iterate over the read base-by-base
                        size_t offset{0};
                        size_t numChars{j->data[i].seq.size()};
                        while (offset < numChars) {
                            auto c = jellyfish::mer_dna::code(j->data[i].seq[offset]);
                            kmer.shift_left(c);
                            if (jellyfish::mer_dna::not_dna(c)) {
                                cmlen = 0;
                                ++offset;
                                continue;
                            }
                            if (++cmlen >= merLen) {
                                size_t twomer = kmer.get_bits(0, 2*merLen);
                                tfeat.diNucleotides[twomer]++;
                                switch(c) {
                                    case jellyfish::mer_dna::CODE_G:
                                    case jellyfish::mer_dna::CODE_C:
                                        tfeat.gcContent += nfact;
                                        break;
                                }
                            }
                            ++offset;
                        } // end while

                        char lastBase = j->data[i].seq.back();
                        auto c = jellyfish::mer_dna::code(lastBase);
                        switch(c) {
                            case jellyfish::mer_dna::CODE_G:
                            case jellyfish::mer_dna::CODE_C:
                                tfeat.gcContent += nfact;
                                break;
                        }

                        featQueue.push(tfeat);
                    } // end job
                } // end while(true)
            } // end lambda
            ));

    } // actor loop

    for (auto& t : threads) { t.join(); ++numComplete; }
    return true;
}
Example #18
	virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame* video, IDeckLinkAudioInputPacket* audio)
	{	
		if(!video)
			return S_OK;

		try
		{
			graph_->set_value("tick-time", tick_timer_.elapsed()*format_desc_.fps*0.5);
			tick_timer_.restart();

			frame_timer_.restart();

			// PUSH

			void* bytes = nullptr;
			if(FAILED(video->GetBytes(&bytes)) || !bytes)
				return S_OK;
			
			safe_ptr<AVFrame> av_frame(avcodec_alloc_frame(), av_free);	
			avcodec_get_frame_defaults(av_frame.get());
						
			av_frame->data[0]			= reinterpret_cast<uint8_t*>(bytes);
			av_frame->linesize[0]		= video->GetRowBytes();			
			av_frame->format			= PIX_FMT_UYVY422;
			av_frame->width				= video->GetWidth();
			av_frame->height			= video->GetHeight();
			av_frame->interlaced_frame	= format_desc_.field_mode != core::field_mode::progressive;
			av_frame->top_field_first	= format_desc_.field_mode == core::field_mode::upper ? 1 : 0;
				
			std::shared_ptr<core::audio_buffer> audio_buffer;

			// It is assumed that audio is always equal or ahead of video.
			if(audio && SUCCEEDED(audio->GetBytes(&bytes)) && bytes)
			{
				auto sample_frame_count = audio->GetSampleFrameCount();
				auto audio_data = reinterpret_cast<int32_t*>(bytes);
				audio_buffer = std::make_shared<core::audio_buffer>(audio_data, audio_data + sample_frame_count*format_desc_.audio_channels);
			}
			else			
				audio_buffer = std::make_shared<core::audio_buffer>(audio_cadence_.front(), 0);
			
			// Note: Uses 1 step rotated cadence for 1001 modes (1602, 1602, 1601, 1602, 1601)
			// This cadence fills the audio mixer most optimally.

			sync_buffer_.push_back(audio_buffer->size());		
			if(!boost::range::equal(sync_buffer_, audio_cadence_))
			{
				CASPAR_LOG(trace) << print() << L" Syncing audio.";
				return S_OK;
			}

			muxer_.push(audio_buffer);
			muxer_.push(av_frame, hints_);	
											
			boost::range::rotate(audio_cadence_, std::begin(audio_cadence_)+1);
			
			// POLL
			
			for(auto frame = muxer_.poll(); frame; frame = muxer_.poll())
			{
				if(!frame_buffer_.try_push(make_safe_ptr(frame)))
				{
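					// Buffer full: drop the oldest frame to make room for the newest one and report the drop.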
					auto dummy = core::basic_frame::empty();
					frame_buffer_.try_pop(dummy);

					frame_buffer_.try_push(make_safe_ptr(frame));

					graph_->set_tag("dropped-frame");
				}
			}

			graph_->set_value("frame-time", frame_timer_.elapsed()*format_desc_.fps*0.5);

			graph_->set_value("output-buffer", static_cast<float>(frame_buffer_.size())/static_cast<float>(frame_buffer_.capacity()));	
		}
		catch(...)
		{
			exception_ = std::current_exception();
			return E_FAIL;
		}

		return S_OK;
	}
Example #19
	void stop()
	{
		is_running_ = false;
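		// Push a dummy frame so a consumer that may be waiting on the buffer can wake up and see is_running_ == false.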
		frame_buffer_.try_push(make_safe<read_frame>());
	}
Example #20
	layer_consumer() : first_frame_reported_(false)
	{
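		// The future becomes ready once send() pushes the first frame; keep at most two frames buffered.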
		first_frame_available_ = first_frame_promise_.get_future();
		frame_buffer_.set_capacity(2);
	}