static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx, AVPacket *pkt, decklink_frame *frame) { struct klvanc_line_set_s vanc_lines = { 0 }; int ret = 0, i; if (!ctx->supports_vanc) return 0; construct_cc(avctx, ctx, pkt, &vanc_lines); IDeckLinkVideoFrameAncillary *vanc; int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc); if (result != S_OK) { av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n"); ret = AVERROR(EIO); goto done; } /* Now that we've got all the VANC lines in a nice orderly manner, generate the final VANC sections for the Decklink output */ for (i = 0; i < vanc_lines.num_lines; i++) { struct klvanc_line_s *line = vanc_lines.lines[i]; int real_line; void *buf; if (!line) break; /* FIXME: include hack for certain Decklink cards which mis-represent line numbers for pSF frames */ real_line = line->line_number; result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf); if (result != S_OK) { av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result); continue; } /* Generate the full line taking into account all VANC packets on that line */ result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf, ctx->bmd_width); if (result) { av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n"); continue; } } result = frame->SetAncillaryData(vanc); vanc->Release(); if (result != S_OK) { av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result); ret = AVERROR(EIO); } done: for (i = 0; i < vanc_lines.num_lines; i++) klvanc_line_free(vanc_lines.lines[i]); return ret; }
/* DeckLink capture callback (OBE input path): invoked by the DeckLink driver
 * for every captured video frame / audio packet pair.
 *
 * Video path: ticks the OBE clock from SDI stream time, watches for stalled
 * input (SDI_MAX_DELAY), reads all accessible VANC/VBI lines into a 10-bit
 * buffer, parses them (captions, teletext, video index), then decodes the
 * raw picture via libavcodec into an obe_raw_frame_t and queues it.
 * Audio path: converts the interleaved card audio to planar S32 via
 * avresample and queues it as a separate raw frame.
 *
 * Returns S_OK on every path except the two audio error returns — see the
 * NOTE(review) comments below.
 */
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived( IDeckLinkVideoInputFrame *videoframe, IDeckLinkAudioInputPacket *audioframe )
{
    decklink_ctx_t *decklink_ctx = &decklink_opts_->decklink_ctx;
    obe_raw_frame_t *raw_frame = NULL;
    AVPacket pkt;
    AVFrame *frame = NULL;
    void *frame_bytes, *anc_line;
    obe_t *h = decklink_ctx->h;
    /* NOTE(review): lines_read is incremented below but never read. */
    int finished = 0, ret, num_anc_lines = 0, anc_line_stride, lines_read = 0, first_line = 0, last_line = 0, line, num_vbi_lines, vii_line;
    uint32_t *frame_ptr;
    uint16_t *anc_buf, *anc_buf_pos;
    uint8_t *vbi_buf;
    int anc_lines[DECKLINK_VANC_LINES];
    IDeckLinkVideoFrameAncillary *ancillary;
    BMDTimeValue stream_time, frame_duration;

    /* Once a probe has succeeded there is nothing more to do. */
    if( decklink_opts_->probe_success )
        return S_OK;

    av_init_packet( &pkt );

    if( videoframe )
    {
        if( videoframe->GetFlags() & bmdFrameHasNoInputSource )
        {
            syslog( LOG_ERR, "Decklink card index %i: No input signal detected", decklink_opts_->card_idx );
            return S_OK;
        }
        else if( decklink_opts_->probe )
            decklink_opts_->probe_success = 1;

        /* use SDI ticks as clock source */
        videoframe->GetStreamTime( &stream_time, &frame_duration, OBE_CLOCK );
        obe_clock_tick( h, (int64_t)stream_time );

        /* Track wallclock arrival time; if frames stop arriving for longer
         * than SDI_MAX_DELAY, tell the encoder/mux threads to drop. */
        if( decklink_ctx->last_frame_time == -1 )
            decklink_ctx->last_frame_time = obe_mdate();
        else
        {
            int64_t cur_frame_time = obe_mdate();
            if( cur_frame_time - decklink_ctx->last_frame_time >= SDI_MAX_DELAY )
            {
                syslog( LOG_WARNING, "Decklink card index %i: No frame received for %"PRIi64" ms", decklink_opts_->card_idx,
                        (cur_frame_time - decklink_ctx->last_frame_time) / 1000 );
                pthread_mutex_lock( &h->drop_mutex );
                h->encoder_drop = h->mux_drop = 1;
                pthread_mutex_unlock( &h->drop_mutex );
            }
            decklink_ctx->last_frame_time = cur_frame_time;
        }

        const int width = videoframe->GetWidth();
        const int height = videoframe->GetHeight();
        const int stride = videoframe->GetRowBytes();

        videoframe->GetBytes( &frame_bytes );

        /* TODO: support format switching (rare in SDI) */
        /* Find the table entry for the configured video format; 'j' is reused
         * later for the first active line and the VANC loop terminator. */
        int j;
        for( j = 0; first_active_line[j].format != -1; j++ )
        {
            if( decklink_opts_->video_format == first_active_line[j].format )
                break;
        }

        videoframe->GetAncillaryData( &ancillary );

        /* NTSC starts on line 4 */
        line = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC ? 4 : 1;
        /* 16-byte-aligned stride of one unpacked 10-bit line (uint16_t samples). */
        anc_line_stride = FFALIGN( (width * 2 * sizeof(uint16_t)), 16 );

        /* Overallocate slightly for VANC buffer
         * Some VBI services stray into the active picture so allocate some extra space */
        anc_buf = anc_buf_pos = (uint16_t*)av_malloc( DECKLINK_VANC_LINES * anc_line_stride );
        if( !anc_buf )
        {
            syslog( LOG_ERR, "Malloc failed\n" );
            goto end;
        }

        /* Walk vertical-blanking lines up to the first active line, unpacking
         * each into anc_buf. */
        while( 1 )
        {
            /* Some cards have restrictions on what lines can be accessed so try them all
             * Some buggy decklink cards will randomly refuse access to a particular line so
             * work around this issue by blanking the line */
            if( ancillary->GetBufferForVerticalBlankingLine( line, &anc_line ) == S_OK )
                decklink_ctx->unpack_line( (uint32_t*)anc_line, anc_buf_pos, width );
            else
                decklink_ctx->blank_line( anc_buf_pos, width );

            anc_buf_pos += anc_line_stride / 2;
            anc_lines[num_anc_lines++] = line;

            if( !first_line )
                first_line = line;
            last_line = line;

            lines_read++;
            line = sdi_next_line( decklink_opts_->video_format, line );
            if( line == first_active_line[j].line )
                break;
        }

        ancillary->Release();

        if( !decklink_opts_->probe )
        {
            raw_frame = new_raw_frame();
            if( !raw_frame )
            {
                /* NOTE(review): anc_buf is not freed on this path — leak. */
                syslog( LOG_ERR, "Malloc failed\n" );
                goto end;
            }
        }

        /* Parse each captured VANC line (captions, AFD, etc.). */
        anc_buf_pos = anc_buf;
        for( int i = 0; i < num_anc_lines; i++ )
        {
            parse_vanc_line( h, &decklink_ctx->non_display_parser, raw_frame, anc_buf_pos, width, anc_lines[i] );
            anc_buf_pos += anc_line_stride / 2;
        }

        if( IS_SD( decklink_opts_->video_format ) && first_line != last_line )
        {
            /* Add a some VBI lines to the ancillary buffer */
            frame_ptr = (uint32_t*)frame_bytes;

            /* NTSC starts from line 283 so add an extra line */
            num_vbi_lines = NUM_ACTIVE_VBI_LINES + ( decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC );
            for( int i = 0; i < num_vbi_lines; i++ )
            {
                decklink_ctx->unpack_line( frame_ptr, anc_buf_pos, width );
                anc_buf_pos += anc_line_stride / 2;
                frame_ptr += stride / 4;
                last_line = sdi_next_line( decklink_opts_->video_format, last_line );
            }
            num_anc_lines += num_vbi_lines;

            vbi_buf = (uint8_t*)av_malloc( width * 2 * num_anc_lines );
            if( !vbi_buf )
            {
                syslog( LOG_ERR, "Malloc failed\n" );
                goto end;
            }

            /* Scale the lines from 10-bit to 8-bit */
            decklink_ctx->downscale_line( anc_buf, vbi_buf, num_anc_lines );
            anc_buf_pos = anc_buf;

            /* Handle Video Index information */
            /* Seek anc_buf_pos forward to the video-index line for this standard. */
            int tmp_line = first_line;
            vii_line = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC ? NTSC_VIDEO_INDEX_LINE : PAL_VIDEO_INDEX_LINE;
            while( tmp_line < vii_line )
            {
                anc_buf_pos += anc_line_stride / 2;
                tmp_line++;
            }

            /* NOTE(review): on the goto fail paths below, vbi_buf and anc_buf
             * are not freed — leak on VBI decode failure. */
            if( decode_video_index_information( h, &decklink_ctx->non_display_parser, anc_buf_pos, raw_frame, vii_line ) < 0 )
                goto fail;

            /* One-time zvbi raw decoder setup using the observed line range. */
            if( !decklink_ctx->has_setup_vbi )
            {
                vbi_raw_decoder_init( &decklink_ctx->non_display_parser.vbi_decoder );

                decklink_ctx->non_display_parser.ntsc = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC;
                decklink_ctx->non_display_parser.vbi_decoder.start[0] = first_line;
                decklink_ctx->non_display_parser.vbi_decoder.start[1] = sdi_next_line( decklink_opts_->video_format, first_line );
                decklink_ctx->non_display_parser.vbi_decoder.count[0] = last_line - decklink_ctx->non_display_parser.vbi_decoder.start[1] + 1;
                decklink_ctx->non_display_parser.vbi_decoder.count[1] = decklink_ctx->non_display_parser.vbi_decoder.count[0];

                if( setup_vbi_parser( &decklink_ctx->non_display_parser ) < 0 )
                    goto fail;

                decklink_ctx->has_setup_vbi = 1;
            }

            if( decode_vbi( h, &decklink_ctx->non_display_parser, vbi_buf, raw_frame ) < 0 )
                goto fail;

            av_free( vbi_buf );
        }

        av_free( anc_buf );

        if( !decklink_opts_->probe )
        {
            frame = avcodec_alloc_frame();
            if( !frame )
            {
                syslog( LOG_ERR, "[decklink]: Could not allocate video frame\n" );
                goto end;
            }
            decklink_ctx->codec->width = width;
            decklink_ctx->codec->height = height;

            /* Decode the raw card buffer (e.g. V210) through libavcodec into
             * planar frame data; pkt borrows the card's frame bytes. */
            pkt.data = (uint8_t*)frame_bytes;
            pkt.size = stride * height;

            ret = avcodec_decode_video2( decklink_ctx->codec, frame, &finished, &pkt );
            if( ret < 0 || !finished )
            {
                syslog( LOG_ERR, "[decklink]: Could not decode video frame\n" );
                goto end;
            }

            raw_frame->release_data = obe_release_video_data;
            raw_frame->release_frame = obe_release_frame;

            memcpy( raw_frame->alloc_img.stride, frame->linesize, sizeof(raw_frame->alloc_img.stride) );
            memcpy( raw_frame->alloc_img.plane, frame->data, sizeof(raw_frame->alloc_img.plane) );
            avcodec_free_frame( &frame );
            raw_frame->alloc_img.csp = (int)decklink_ctx->codec->pix_fmt;
            raw_frame->alloc_img.planes = av_pix_fmt_descriptors[raw_frame->alloc_img.csp].nb_components;
            raw_frame->alloc_img.width = width;
            raw_frame->alloc_img.height = height;
            raw_frame->alloc_img.format = decklink_opts_->video_format;
            raw_frame->timebase_num = decklink_opts_->timebase_num;
            raw_frame->timebase_den = decklink_opts_->timebase_den;

            memcpy( &raw_frame->img, &raw_frame->alloc_img, sizeof(raw_frame->alloc_img) );
            if( IS_SD( decklink_opts_->video_format ) )
            {
                /* 486-line NTSC capture is cropped to the 480 encodable lines. */
                if( raw_frame->alloc_img.height == 486 )
                    raw_frame->img.height = 480;
                raw_frame->img.first_line = first_active_line[j].line;
            }

            /* If AFD is present and the stream is SD this will be changed in the video filter */
            raw_frame->sar_width = raw_frame->sar_height = 1;
            raw_frame->pts = stream_time;

            /* Route to the uncompressed-video input stream. */
            for( int i = 0; i < decklink_ctx->device->num_input_streams; i++ )
            {
                if( decklink_ctx->device->streams[i]->stream_format == VIDEO_UNCOMPRESSED )
                    raw_frame->input_stream_id = decklink_ctx->device->streams[i]->input_stream_id;
            }

            if( add_to_filter_queue( h, raw_frame ) < 0 )
                goto fail;

            if( send_vbi_and_ttx( h, &decklink_ctx->non_display_parser, raw_frame->pts ) < 0 )
                goto fail;

            decklink_ctx->non_display_parser.num_vbi = 0;
            decklink_ctx->non_display_parser.num_anc_vbi = 0;
        }
    }

    /* TODO: probe SMPTE 337M audio */

    if( audioframe && !decklink_opts_->probe )
    {
        audioframe->GetBytes( &frame_bytes );

        raw_frame = new_raw_frame();
        if( !raw_frame )
        {
            syslog( LOG_ERR, "Malloc failed\n" );
            goto end;
        }

        raw_frame->audio_frame.num_samples = audioframe->GetSampleFrameCount();
        raw_frame->audio_frame.num_channels = decklink_opts_->num_channels;
        raw_frame->audio_frame.sample_fmt = AV_SAMPLE_FMT_S32P;

        if( av_samples_alloc( raw_frame->audio_frame.audio_data, &raw_frame->audio_frame.linesize, decklink_opts_->num_channels,
                              raw_frame->audio_frame.num_samples, (AVSampleFormat)raw_frame->audio_frame.sample_fmt, 0 ) < 0 )
        {
            /* NOTE(review): returns -1 from an HRESULT-returning method and
             * skips the end: cleanup (raw_frame and pkt leak) — should
             * probably be goto end / goto fail. */
            syslog( LOG_ERR, "Malloc failed\n" );
            return -1;
        }

        /* Convert the card's interleaved samples to planar S32. */
        if( avresample_convert( decklink_ctx->avr, raw_frame->audio_frame.audio_data, raw_frame->audio_frame.linesize,
                                raw_frame->audio_frame.num_samples, (uint8_t**)&frame_bytes, 0, raw_frame->audio_frame.num_samples ) < 0 )
        {
            /* NOTE(review): same issue as above — -1 is not an HRESULT and
             * cleanup is skipped. */
            syslog( LOG_ERR, "[decklink] Sample format conversion failed\n" );
            return -1;
        }

        BMDTimeValue packet_time;
        audioframe->GetPacketTime( &packet_time, OBE_CLOCK );
        raw_frame->pts = packet_time;
        raw_frame->release_data = obe_release_audio_data;
        raw_frame->release_frame = obe_release_frame;
        /* Route to the PCM audio input stream. */
        for( int i = 0; i < decklink_ctx->device->num_input_streams; i++ )
        {
            if( decklink_ctx->device->streams[i]->stream_format == AUDIO_PCM )
                raw_frame->input_stream_id = decklink_ctx->device->streams[i]->input_stream_id;
        }

        if( add_to_filter_queue( decklink_ctx->h, raw_frame ) < 0 )
            goto fail;
    }

end:
    if( frame )
        avcodec_free_frame( &frame );

    av_free_packet( &pkt );

    return S_OK;

fail:
    /* Queued ownership failed — release the frame's data and wrapper here. */
    if( raw_frame )
    {
        raw_frame->release_data( raw_frame );
        raw_frame->release_frame( raw_frame );
    }

    return S_OK;
}
/* DeckLink capture callback (FFmpeg libavdevice input): invoked by the
 * driver with a captured video frame and/or audio packet.
 *
 * Video: wraps the card's frame bytes in an AVPacket (PTS from stream time
 * or wallclock, per ctx->video_pts_source), optionally paints colour bars
 * when signal is lost, optionally extracts teletext from VBI lines
 * (CONFIG_LIBZVBI), and pushes the packet onto the demuxer queue.
 * Audio: wraps the 16-bit PCM payload similarly and queues it.
 * Always returns S_OK; failed queue inserts just bump ctx->dropped.
 */
HRESULT decklink_input_callback::VideoInputFrameArrived( IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;
    int64_t wallclock = 0;

    ctx->frameCount++;
    /* Only sample the wallclock when a stream actually uses it as PTS source. */
    if (ctx->audio_pts_source == PTS_SRC_WALLCLOCK || ctx->video_pts_source == PTS_SRC_WALLCLOCK)
        wallclock = av_gettime_relative();

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt;
        av_init_packet(&pkt);
        /* Periodic (every 25th frame) queue-size debug log. */
        if (ctx->frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&ctx->queue);
            av_log(avctx, AV_LOG_DEBUG,
                   "Frame received (#%lu) - Valid (%liB) - QSize %fMB\n",
                   ctx->frameCount,
                   videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                   (double)qsize / 1024 / 1024);
        }

        videoFrame->GetBytes(&frameBytes);
        videoFrame->GetStreamTime(&frameTime, &frameDuration, ctx->video_st->time_base.den);

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            /* No signal: optionally overwrite the (8-bit UYVY) frame with the
             * classic 75% colour bars, two pixels per 32-bit word. */
            if (ctx->draw_bars && videoFrame->GetPixelFormat() == bmdFormat8BitYUV) {
                unsigned bars[8] = { 0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035, 0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 };
                int width = videoFrame->GetWidth();
                int height = videoFrame->GetHeight();
                unsigned *p = (unsigned *)frameBytes;

                for (int y = 0; y < height; y++) {
                    for (int x = 0; x < width; x += 2)
                        *p++ = bars[(x * 8) / width];
                }
            }

            /* Log only on the transition into the no-signal state. */
            if (!no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - No input signal detected "
                       "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 1;
        } else {
            /* Log only on the transition back to a valid signal. */
            if (no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - Input returned "
                       "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 0;
        }

        pkt.pts = get_pkt_pts(videoFrame, audioFrame, wallclock, ctx->video_pts_source, ctx->video_st->time_base, &initial_video_pts);
        pkt.dts = pkt.pts;

        pkt.duration = frameDuration;
        //To be made sure it still applies
        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->video_st->index;
        /* NOTE(review): pkt.data borrows the DeckLink-owned frame buffer;
         * assumes avpacket_queue_put copies the payload — confirm. */
        pkt.data = (uint8_t *)frameBytes;
        pkt.size = videoFrame->GetRowBytes() * videoFrame->GetHeight();
        //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);

#if CONFIG_LIBZVBI
        /* Teletext extraction: only for 8-bit UYVY SD (720-wide) frames with
         * a valid signal and configured teletext lines. */
        if (!no_video && ctx->teletext_lines && videoFrame->GetPixelFormat() == bmdFormat8BitYUV && videoFrame->GetWidth() == 720) {
            IDeckLinkVideoFrameAncillary *vanc;
            AVPacket txt_pkt;
            uint8_t txt_buf0[1611]; // max 35 * 46 bytes decoded teletext lines + 1 byte data_identifier
            uint8_t *txt_buf = txt_buf0;

            if (videoFrame->GetAncillaryData(&vanc) == S_OK) {
                int i;
                int64_t line_mask = 1;
                txt_buf[0] = 0x10;    // data_identifier - EBU_data
                txt_buf++;
                /* Walk VBI lines 6..22 then 318..335 (the jump at i == 22
                 * skips the active picture between the two fields); each bit
                 * of teletext_lines enables one candidate line. */
                for (i = 6; i < 336; i++, line_mask <<= 1) {
                    uint8_t *buf;
                    if ((ctx->teletext_lines & line_mask) && vanc->GetBufferForVerticalBlankingLine(i, (void**)&buf) == S_OK) {
                        if (teletext_data_unit_from_vbi_data(i, buf, txt_buf) >= 0)
                            txt_buf += 46;
                    }
                    if (i == 22)
                        i = 317;
                }
                vanc->Release();
                /* Anything decoded beyond the data_identifier byte? */
                if (txt_buf - txt_buf0 > 1) {
                    /* Pad with stuffing units so the PES payload length is a
                     * multiple of 184 bytes (4 * 46). */
                    int stuffing_units = (4 - ((45 + txt_buf - txt_buf0) / 46) % 4) % 4;
                    while (stuffing_units--) {
                        memset(txt_buf, 0xff, 46);
                        txt_buf[1] = 0x2c; // data_unit_length
                        txt_buf += 46;
                    }
                    av_init_packet(&txt_pkt);
                    txt_pkt.pts = pkt.pts;
                    txt_pkt.dts = pkt.dts;
                    txt_pkt.stream_index = ctx->teletext_st->index;
                    /* NOTE(review): txt_pkt.data points at the stack buffer
                     * txt_buf0; safe only if avpacket_queue_put copies — confirm. */
                    txt_pkt.data = txt_buf0;
                    txt_pkt.size = txt_buf - txt_buf0;
                    if (avpacket_queue_put(&ctx->queue, &txt_pkt) < 0) {
                        ++ctx->dropped;
                    }
                }
            }
        }
#endif

        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    // Handle Audio Frame
    if (audioFrame) {
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        //hack among hacks
        /* 16-bit PCM: frames * channels * 2 bytes. */
        pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codecpar->channels * (16 / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, ctx->audio_st->time_base.den);
        pkt.pts = get_pkt_pts(videoFrame, audioFrame, wallclock, ctx->audio_pts_source, ctx->audio_st->time_base, &initial_audio_pts);
        pkt.dts = pkt.pts;

        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->audio_st->index;
        pkt.data = (uint8_t *)audioFrameBytes;

        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    return S_OK;
}
/* DeckLink capture callback (VLC access/demux): turns captured video and
 * audio into VLC blocks and sends them to the ES output.
 *
 * Video: allocates a block (4 B/pixel when capturing 10-bit, else 2),
 * converts V210 to 16-bit samples in the 10-bit path (also scanning VBI
 * lines 1-20 for EIA-608 closed captions), or row-copies the 8-bit frame;
 * stamps PTS from SDI stream time and updates the shared PCR.
 * Audio: copies 16-bit PCM into a block and sends it likewise.
 * Always returns S_OK; allocation failures silently drop the frame.
 */
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    demux_sys_t *sys = demux_->p_sys;

    if (videoFrame) {
        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            msg_Warn(demux_, "No input signal detected");
            return S_OK;
        }

        const int width = videoFrame->GetWidth();
        const int height = videoFrame->GetHeight();
        const int stride = videoFrame->GetRowBytes();

        /* 10-bit output is unpacked to 2 x uint16_t per pixel (4 bytes);
         * 8-bit UYVY stays at 2 bytes per pixel. */
        int bpp = sys->tenbits ? 4 : 2;
        block_t *video_frame = block_Alloc(width * height * bpp);
        if (!video_frame)
            return S_OK;

        const uint32_t *frame_bytes;
        videoFrame->GetBytes((void**)&frame_bytes);

        BMDTimeValue stream_time, frame_duration;
        videoFrame->GetStreamTime(&stream_time, &frame_duration, CLOCK_FREQ);
        video_frame->i_flags = BLOCK_FLAG_TYPE_I | sys->dominance_flags;
        video_frame->i_pts = video_frame->i_dts = VLC_TS_0 + stream_time;

        if (sys->tenbits) {
            v210_convert((uint16_t*)video_frame->p_buffer, frame_bytes, width, height);
            /* Scan VBI lines 1-20 for the first line carrying EIA-608
             * closed-caption VANC and forward it as an SPU ES. */
            IDeckLinkVideoFrameAncillary *vanc;
            if (videoFrame->GetAncillaryData(&vanc) == S_OK) {
                for (int i = 1; i < 21; i++) {
                    uint32_t *buf;
                    if (vanc->GetBufferForVerticalBlankingLine(i, (void**)&buf) != S_OK)
                        break;
                    /* NOTE(review): VLA sized by frame width — stack usage
                     * grows with width (e.g. ~7.7 KiB at 1920). */
                    uint16_t dec[width * 2];
                    v210_convert(&dec[0], buf, width, 1);
                    block_t *cc = vanc_to_cc(demux_, dec, width * 2);
                    if (!cc)
                        continue;
                    cc->i_pts = cc->i_dts = VLC_TS_0 + stream_time;

                    /* Lazily create the closed-caption ES the first time CC
                     * data is seen. */
                    if (!sys->cc_es) {
                        es_format_t fmt;

                        es_format_Init( &fmt, SPU_ES, VLC_CODEC_EIA608_1 );
                        fmt.psz_description = strdup(N_("Closed captions 1"));
                        if (fmt.psz_description) {
                            sys->cc_es = es_out_Add(demux_->out, &fmt);
                            msg_Dbg(demux_, "Adding Closed captions stream");
                        }
                    }
                    if (sys->cc_es)
                        es_out_Send(demux_->out, sys->cc_es, cc);
                    else
                        block_Release(cc);
                    break; // we found the line with Closed Caption data
                }
                vanc->Release();
            }
        } else {
            /* 8-bit path: copy each row, dropping any padding beyond the
             * visible width (assumes stride >= width * 2 — per GetRowBytes). */
            for (int y = 0; y < height; ++y) {
                const uint8_t *src = (const uint8_t *)frame_bytes + stride * y;
                uint8_t *dst = video_frame->p_buffer + width * 2 * y;
                memcpy(dst, src, width * 2);
            }
        }

        /* Track the highest PTS seen across video and audio (shared clock). */
        vlc_mutex_lock(&sys->pts_lock);
        if (video_frame->i_pts > sys->last_pts)
            sys->last_pts = video_frame->i_pts;
        vlc_mutex_unlock(&sys->pts_lock);

        es_out_Control(demux_->out, ES_OUT_SET_PCR, video_frame->i_pts);
        es_out_Send(demux_->out, sys->video_es, video_frame);
    }

    if (audioFrame) {
        /* 16-bit PCM payload size: frames * 2 bytes * channel count. */
        const int bytes = audioFrame->GetSampleFrameCount() * sizeof(int16_t) * sys->channels;

        block_t *audio_frame = block_Alloc(bytes);
        if (!audio_frame)
            return S_OK;

        void *frame_bytes;
        audioFrame->GetBytes(&frame_bytes);
        memcpy(audio_frame->p_buffer, frame_bytes, bytes);

        BMDTimeValue packet_time;
        audioFrame->GetPacketTime(&packet_time, CLOCK_FREQ);
        audio_frame->i_pts = audio_frame->i_dts = VLC_TS_0 + packet_time;

        vlc_mutex_lock(&sys->pts_lock);
        if (audio_frame->i_pts > sys->last_pts)
            sys->last_pts = audio_frame->i_pts;
        vlc_mutex_unlock(&sys->pts_lock);

        es_out_Control(demux_->out, ES_OUT_SET_PCR, audio_frame->i_pts);
        es_out_Send(demux_->out, sys->audio_es, audio_frame);
    }

    return S_OK;
}