// Converts a BGRA frame from the mixer into the encoder's pixel format and
// size using libswscale. The returned AVFrame borrows its pixel storage from
// the member buffer picture_buf_, which is reused on every call, so the
// result is only valid until the next invocation on this instance.
std::shared_ptr<AVFrame> convert_video(core::read_frame& frame, AVCodecContext* c)
{
	if(!sws_)
	{
		// Lazily create and cache the scaler context: source is the channel's
		// BGRA format, destination is whatever the codec context expects.
		sws_.reset(sws_getContext(format_desc_.width, format_desc_.height, PIX_FMT_BGRA, c->width, c->height, c->pix_fmt, SWS_BICUBIC, nullptr, nullptr, nullptr), sws_freeContext);
		if (sws_ == nullptr)
			BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Cannot initialize the conversion context"));
	}

	std::shared_ptr<AVFrame> in_frame(avcodec_alloc_frame(), av_free);
	auto in_picture = reinterpret_cast<AVPicture*>(in_frame.get());

	if (key_only_)
	{
		// Key-only mode: build a separate source picture from the frame's alpha
		// channel. fast_memshfl presumably replicates the alpha byte into all
		// four channels using the shuffle masks -- TODO confirm against its
		// implementation.
		key_picture_buf_.resize(frame.image_data().size());
		in_picture->linesize[0] = format_desc_.width * 4;
		in_picture->data[0] = key_picture_buf_.data();
		fast_memshfl(in_picture->data[0], frame.image_data().begin(), frame.image_data().size(), 0x0F0F0F0F, 0x0B0B0B0B, 0x07070707, 0x03030303);
	}
	else
	{
		// Fill mode: wrap the frame's BGRA pixels directly, no copy. The
		// const_cast is tolerable because sws_scale only reads the source.
		avpicture_fill(in_picture, const_cast<uint8_t*>(frame.image_data().begin()), PIX_FMT_BGRA, format_desc_.width, format_desc_.height);
	}

	std::shared_ptr<AVFrame> out_frame(avcodec_alloc_frame(), av_free);
	// Back the output frame with the reused member buffer (no per-frame heap
	// allocation beyond the first call for a given output size).
	picture_buf_.resize(avpicture_get_size(c->pix_fmt, c->width, c->height));
	avpicture_fill(reinterpret_cast<AVPicture*>(out_frame.get()), picture_buf_.data(), c->pix_fmt, c->width, c->height);

	sws_scale(sws_.get(), in_frame->data, in_frame->linesize, 0, format_desc_.height, out_frame->data, out_frame->linesize);

	return out_frame;
}
// Sets up an audio resampler for the input->output conversion. When the
// channel count, sample rate and sample format already match, no resampler
// is created and the instance acts as a pass-through.
implementation(size_t output_channels, size_t input_channels, size_t output_sample_rate, size_t input_sample_rate, AVSampleFormat output_sample_format, AVSampleFormat input_sample_format)
	: output_channels_(output_channels)
	, output_sample_format_(output_sample_format)
	, input_channels_(input_channels)
	, input_sample_format_(input_sample_format)
{
	const bool needs_conversion =
			input_channels      != output_channels     ||
			input_sample_rate   != output_sample_rate  ||
			input_sample_format != output_sample_format;

	if(!needs_conversion)
		return;

	// filter_length=16, log2_phase_count=10, linear=0, cutoff=0.8 (libavcodec
	// resampler tuning parameters).
	auto raw_resampler = av_audio_resample_init(output_channels, input_channels, output_sample_rate, input_sample_rate, output_sample_format, input_sample_format, 16, 10, 0, 0.8);

	buffer2_.resize(AVCODEC_MAX_AUDIO_FRAME_SIZE*2);

	// Log what we are converting from so mismatched inputs are visible.
	char format_description[200];
	av_get_sample_fmt_string(format_description, 200, input_sample_format);
	CASPAR_LOG(warning) << L"[audio-resampler]"
			<< L" sample-rate: " << input_sample_rate
			<< L" channels: " << input_channels
			<< L" sample-fmt: " << widen(format_description);

	if(raw_resampler)
		resampler_.reset(raw_resampler, audio_resample_close);
	else
		BOOST_THROW_EXCEPTION(caspar_exception());
}
// Opens DeckLink capture device `device_index`, wires up diagnostics, and
// starts video (8-bit YUV) + audio (48kHz/32-bit) input streams.
// Throws caspar_exception if any stage of enabling/starting input fails.
decklink_producer(const core::video_format_desc& format_desc, size_t device_index, const safe_ptr<core::frame_factory>& frame_factory, const std::wstring& filter)
	: decklink_(get_device(device_index))
	, input_(decklink_)
	, attributes_(decklink_)
	, model_name_(get_model_name(decklink_))
	, device_index_(device_index)
	, filter_(filter)
	, format_desc_(format_desc)
	, audio_cadence_(format_desc.audio_cadence)
	, muxer_(format_desc.fps, frame_factory, filter)
	, sync_buffer_(format_desc.audio_cadence.size())
	, frame_factory_(frame_factory)
{
	hints_ = 0;
	// Keep the producer at most 2 frames ahead of the consumer.
	frame_buffer_.set_capacity(2);

	graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f));
	graph_->set_color("late-frame", diagnostics::color(0.6f, 0.3f, 0.3f));
	graph_->set_color("frame-time", diagnostics::color(1.0f, 0.0f, 0.0f));
	graph_->set_color("dropped-frame", diagnostics::color(0.3f, 0.6f, 0.3f));
	graph_->set_color("output-buffer", diagnostics::color(0.0f, 1.0f, 0.0f));
	graph_->set_text(print());
	diagnostics::register_graph(graph_);

	auto display_mode = get_display_mode(input_, format_desc_.format, bmdFormat8BitYUV, bmdVideoInputFlagDefault);

	// NOTE: bmdFormat8BitARGB is currently not supported by any decklink card. (2011-05-08)
	if(FAILED(input_->EnableVideoInput(display_mode, bmdFormat8BitYUV, bmdVideoInputFlagDefault)))
		BOOST_THROW_EXCEPTION(caspar_exception()
								<< msg_info(narrow(print()) + " Could not enable video input.")
								<< boost::errinfo_api_function("EnableVideoInput"));

	if(FAILED(input_->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, format_desc_.audio_channels)))
		BOOST_THROW_EXCEPTION(caspar_exception()
								<< msg_info(narrow(print()) + " Could not enable audio input.")
								<< boost::errinfo_api_function("EnableAudioInput"));

	// FIX: was `FAILED(...) != S_OK`, which compared FAILED()'s boolean result
	// against S_OK (0). It happened to work only because S_OK is 0; use the
	// plain FAILED() test like every other call here.
	if(FAILED(input_->SetCallback(this)))
		BOOST_THROW_EXCEPTION(caspar_exception()
								<< msg_info(narrow(print()) + " Failed to set input callback.")
								<< boost::errinfo_api_function("SetCallback"));

	if(FAILED(input_->StartStreams()))
		BOOST_THROW_EXCEPTION(caspar_exception()
								<< msg_info(narrow(print()) + " Failed to start input stream.")
								<< boost::errinfo_api_function("StartStreams"));
}
std::shared_ptr<AVStream> add_audio_stream(std::vector<option>& options) { if(output_format_.acodec == CODEC_ID_NONE) return nullptr; auto st = av_new_stream(oc_.get(), 1); if(!st) BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Could not allocate audio-stream") << boost::errinfo_api_function("av_new_stream")); auto encoder = avcodec_find_encoder(output_format_.acodec); if (!encoder) BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("codec not found")); auto c = st->codec; avcodec_get_context_defaults3(c, encoder); c->codec_id = output_format_.acodec; c->codec_type = AVMEDIA_TYPE_AUDIO; c->sample_rate = 48000; c->channels = channel_layout_.num_channels; c->sample_fmt = SAMPLE_FMT_S16; if(output_format_.vcodec == CODEC_ID_FLV1) c->sample_rate = 44100; if(output_format_.format->flags & AVFMT_GLOBALHEADER) c->flags |= CODEC_FLAG_GLOBAL_HEADER; boost::range::remove_erase_if(options, [&](const option& o) { return ffmpeg::av_opt_set(c, o.name.c_str(), o.value.c_str(), AV_OPT_SEARCH_CHILDREN) > -1; }); THROW_ON_ERROR2(avcodec_open(c, encoder), "[ffmpeg_consumer]"); return std::shared_ptr<AVStream>(st, [](AVStream* st) { LOG_ON_ERROR2(avcodec_close(st->codec), "[ffmpeg_consumer]");; av_freep(&st->codec); av_freep(&st); }); }
// Constructs the iVGA consumer. Fails fast with a caspar_exception when the
// AirSend DLL cannot be loaded; otherwise registers the diagnostics graph.
newtek_ivga_consumer(core::channel_layout channel_layout)
	: executor_(print())
	, channel_layout_(channel_layout)
{
	if (!airsend::is_available())
	{
		const auto message = narrow(airsend::dll_name()) + " not available";
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(message));
	}

	connected_ = false;

	// Diagnostics graph setup.
	graph_->set_text(print());
	graph_->set_color("frame-time", diagnostics::color(0.5f, 1.0f, 0.2f));
	graph_->set_color("tick-time", diagnostics::color(0.0f, 0.6f, 0.9f));
	diagnostics::register_graph(graph_);
}
// Returns `file`'s path expressed relative to the folder `relative_to`,
// e.g. get_relative("/a/b/c.txt", "/a") -> "b/c.txt". Returns an empty path
// when `file` IS `relative_to`. Throws caspar_exception when `file` is not
// located underneath `relative_to`.
boost::filesystem::path get_relative(const boost::filesystem::path& file, const boost::filesystem::path& relative_to)
{
	auto result = file.filename();
	auto current_path = file;

	if (boost::filesystem::equivalent(current_path, relative_to))
		return L"";

	while (true)
	{
		current_path = current_path.parent_path();

		// FIX: the empty-check must come BEFORE calling equivalent(). The
		// original checked equivalent() first, so when the walk ran off the
		// root (current_path empty), boost::filesystem::equivalent threw a
		// filesystem_error for the non-existent path and the intended
		// caspar_exception below was unreachable.
		if (current_path.empty())
			CASPAR_THROW_EXCEPTION(caspar_exception() << msg_info(
					"File " + file.string() + " not relative to folder " + relative_to.string()));

		if (boost::filesystem::equivalent(current_path, relative_to))
			break;

		// Prepend the current directory name as we climb towards relative_to.
		result = current_path.filename() / result;
	}

	return result;
}
// Compiles the given GLSL vertex and fragment shader sources, links them into
// a program object (stored in program_), and makes it current. Uses the
// legacy ARB shader-object extension API throughout. Throws caspar_exception
// with the GL info log on any compile or link failure.
implementation(const std::string& vertex_source_str, const std::string& fragment_source_str)
	: program_(0)
{
	GLint success;

	// --- Vertex shader: upload source, compile, check status. ---
	const char* vertex_source = vertex_source_str.c_str();

	auto vertex_shader = glCreateShaderObjectARB(GL_VERTEX_SHADER_ARB);

	GL(glShaderSourceARB(vertex_shader, 1, &vertex_source, NULL));
	GL(glCompileShaderARB(vertex_shader));
	GL(glGetObjectParameterivARB(vertex_shader, GL_OBJECT_COMPILE_STATUS_ARB, &success));

	if (success == GL_FALSE)
	{
		char info[2048];
		GL(glGetInfoLogARB(vertex_shader, sizeof(info), 0, info));
		GL(glDeleteObjectARB(vertex_shader));
		std::stringstream str;
		str << "Failed to compile vertex shader:" << std::endl << info << std::endl;
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(str.str()));
	}

	// --- Fragment shader: same sequence. (NB: "fragmemt" typo is pre-existing.) ---
	const char* fragment_source = fragment_source_str.c_str();

	auto fragmemt_shader = glCreateShaderObjectARB(GL_FRAGMENT_SHADER_ARB);

	GL(glShaderSourceARB(fragmemt_shader, 1, &fragment_source, NULL));
	GL(glCompileShaderARB(fragmemt_shader));
	GL(glGetObjectParameterivARB(fragmemt_shader, GL_OBJECT_COMPILE_STATUS_ARB, &success));

	if (success == GL_FALSE)
	{
		char info[2048];
		GL(glGetInfoLogARB(fragmemt_shader, sizeof(info), 0, info));
		GL(glDeleteObjectARB(fragmemt_shader));
		std::stringstream str;
		str << "Failed to compile fragment shader:" << std::endl << info << std::endl;
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(str.str()));
	}

	// --- Link program; shader objects can be deleted once attached+linked
	// (GL keeps them alive until the program is deleted). ---
	program_ = glCreateProgramObjectARB();

	GL(glAttachObjectARB(program_, vertex_shader));
	GL(glAttachObjectARB(program_, fragmemt_shader));

	GL(glLinkProgramARB(program_));

	GL(glDeleteObjectARB(vertex_shader));
	GL(glDeleteObjectARB(fragmemt_shader));

	GL(glGetObjectParameterivARB(program_, GL_OBJECT_LINK_STATUS_ARB, &success));

	if (success == GL_FALSE)
	{
		char info[2048];
		GL(glGetInfoLogARB(program_, sizeof(info), 0, info));
		// NOTE(review): program_ is deleted but not reset to 0 before the
		// throw; a destructor that deletes program_ would double-delete --
		// verify the owning class's teardown.
		GL(glDeleteObjectARB(program_));
		std::stringstream str;
		str << "Failed to link shader program:" << std::endl << info << std::endl;
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info(str.str()));
	}

	GL(glUseProgramObjectARB(program_));
}
std::map<std::string, std::string> read_flv_meta_info(const std::string& filename) { std::map<std::string, std::string> values; if(boost::filesystem2::path(filename).extension() != ".flv") return values; try { if(!boost::filesystem2::exists(filename)) BOOST_THROW_EXCEPTION(caspar_exception()); std::fstream fileStream = std::fstream(filename, std::fstream::in); std::vector<char> bytes2(256); fileStream.read(bytes2.data(), bytes2.size()); auto ptr = bytes2.data(); ptr += 27; if(std::string(ptr, ptr+10) == "onMetaData") { ptr += 16; for(int n = 0; n < 16; ++n) { char name_size = *ptr++; if(name_size == 0) break; auto name = std::string(ptr, ptr + name_size); ptr += name_size; char data_type = *ptr++; switch(data_type) { case 0: // double { static_assert(sizeof(double) == 8, ""); std::reverse(ptr, ptr+8); values[name] = boost::lexical_cast<std::string>(*(double*)(ptr)); ptr += 9; break; } case 1: // bool { values[name] = boost::lexical_cast<std::string>(*ptr != 0); ptr += 2; break; } } } } } catch(...) { CASPAR_LOG_CURRENT_EXCEPTION(); } return values; }
// Creates and opens the video stream on the output context, applying
// codec-specific defaults (ProRes/DNxHD/DV/H.264/QTRLE) before handing any
// user-supplied options to libavcodec (recognised options are erased from
// `options`). Returns nullptr when no video codec is configured. The
// returned shared_ptr closes and frees the codec/stream on release.
std::shared_ptr<AVStream> add_video_stream(std::vector<option>& options)
{
	if(output_format_.vcodec == CODEC_ID_NONE)
		return nullptr;

	auto st = av_new_stream(oc_.get(), 0);
	if (!st)
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Could not allocate video-stream.") << boost::errinfo_api_function("av_new_stream"));

	auto encoder = avcodec_find_encoder(output_format_.vcodec);
	if (!encoder)
		BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Codec not found."));

	auto c = st->codec;

	avcodec_get_context_defaults3(c, encoder);

	c->codec_id		= output_format_.vcodec;
	c->codec_type		= AVMEDIA_TYPE_VIDEO;
	c->width		= output_format_.width;
	c->height		= output_format_.height;
	// time_base is duration/time_scale, i.e. the inverse of the frame rate.
	c->time_base.den	= format_desc_.time_scale;
	c->time_base.num	= format_desc_.duration;
	c->gop_size		= 25;
	// Enable interlaced motion-estimation/DCT only for interlaced formats.
	c->flags |= format_desc_.field_mode == core::field_mode::progressive ? 0 : (CODEC_FLAG_INTERLACED_ME | CODEC_FLAG_INTERLACED_DCT);
	if(c->pix_fmt == PIX_FMT_NONE)
		c->pix_fmt = PIX_FMT_YUV420P;

	if(c->codec_id == CODEC_ID_PRORES)
	{
		// SD vs HD ProRes bitrates (Mbit/s values per Apple's profiles --
		// presumably targeting ProRes 422 HQ; verify against encoder docs).
		c->bit_rate	= c->width < 1280 ? 63*1000000 : 220*1000000;
		c->pix_fmt	= PIX_FMT_YUV422P10;
	}
	else if(c->codec_id == CODEC_ID_DNXHD)
	{
		// DNxHD only defines HD raster sizes.
		if(c->width < 1280 || c->height < 720)
			BOOST_THROW_EXCEPTION(caspar_exception() << msg_info("Unsupported video dimensions."));

		c->bit_rate	= 220*1000000;
		c->pix_fmt	= PIX_FMT_YUV422P;
	}
	else if(c->codec_id == CODEC_ID_DVVIDEO)
	{
		// DV uses anamorphic rasters: 960 wide for 720p, 1280/1440 for 1080
		// depending on 1001-based (NTSC-family) vs integer frame durations.
		c->width = c->height == 1280 ? 960  : c->width;

		if(format_desc_.format == core::video_format::ntsc)
			c->pix_fmt = PIX_FMT_YUV411P;
		else if(format_desc_.format == core::video_format::pal)
			c->pix_fmt = PIX_FMT_YUV420P;
		else // dv50
			c->pix_fmt = PIX_FMT_YUV422P;

		if(format_desc_.duration == 1001)
			c->width = c->height == 1080 ? 1280 : c->width;
		else
			c->width = c->height == 1080 ? 1440 : c->width;
	}
	else if(c->codec_id == CODEC_ID_H264)
	{
		c->pix_fmt = PIX_FMT_YUV420P;
		// Sensible x264 defaults only when the user supplied no options.
		if(options.empty())
		{
			av_opt_set(c->priv_data, "preset", "ultrafast", 0);
			av_opt_set(c->priv_data, "tune",   "fastdecode",   0);
			av_opt_set(c->priv_data, "crf",    "5",     0);
		}
	}
	else if(c->codec_id == CODEC_ID_QTRLE)
	{
		c->pix_fmt = PIX_FMT_ARGB;
	}

	c->max_b_frames = 0; // b-frames not supported.

	// Try each option on the context, then on the codec's private options;
	// erase whichever were accepted so later stages see only the leftovers.
	boost::range::remove_erase_if(options, [&](const option& o)
	{
		return ffmpeg::av_opt_set(c, o.name.c_str(), o.value.c_str(), AV_OPT_SEARCH_CHILDREN) > -1 ||
			   ffmpeg::av_opt_set(c->priv_data, o.name.c_str(), o.value.c_str(), AV_OPT_SEARCH_CHILDREN) > -1;
	});

	if(output_format_.format->flags & AVFMT_GLOBALHEADER)
		c->flags |= CODEC_FLAG_GLOBAL_HEADER;

	c->thread_count = boost::thread::hardware_concurrency();
	// Some encoders reject multi-threading; retry single-threaded before
	// giving up.
	if(avcodec_open(c, encoder) < 0)
	{
		c->thread_count = 1;
		THROW_ON_ERROR2(avcodec_open(c, encoder), "[ffmpeg_consumer]");
	}

	return std::shared_ptr<AVStream>(st, [](AVStream* st)
	{
		LOG_ON_ERROR2(avcodec_close(st->codec), "[ffmpeg_consumer]");
		av_freep(&st->codec);
		av_freep(&st);
	});
}