// Parse the user data for captions. The udtype variable denotes // to which type of data it belongs: // 0 .. sequence header // 1 .. GOP header // 2 .. picture header // Return TRUE if the data parsing finished, FALSE otherwise. // estream->pos is advanced. Data is only processed if ustream->error // is FALSE, parsing can set ustream->error to TRUE. int user_data(struct lib_cc_decode *ctx, struct bitstream *ustream, int udtype, struct cc_subtitle *sub) { dbg_print(CCX_DMT_VERBOSE, "user_data(%d)\n", udtype); // Shall not happen if (ustream->error || ustream->bitsleft <= 0) { // ustream->error=1; return 0; // Actually discarded on call. // CFS: Seen in a Wobble edited file. // fatal(CCX_COMMON_EXIT_BUG_BUG, "user_data: Impossible!"); } // Do something ctx->stat_numuserheaders++; //header+=4; unsigned char *ud_header = next_bytes(ustream, 4); if (ustream->error || ustream->bitsleft <= 0) { return 0; // Actually discarded on call. // CFS: Seen in Stick_VHS.mpg. // fatal(CCX_COMMON_EXIT_BUG_BUG, "user_data: Impossible!"); } // DVD CC header, see // <http://www.theneitherworld.com/mcpoodle/SCC_TOOLS/DOCS/SCC_FORMAT.HTML> if ( !memcmp(ud_header,"\x43\x43", 2 ) ) { ctx->stat_dvdccheaders++; // Probably unneeded, but keep looking for extra caption blocks int maybeextracb = 1; read_bytes(ustream, 4); // "43 43 01 F8" unsigned char pattern_flag = (unsigned char) read_bits(ustream,1); read_bits(ustream,1); int capcount=(int) read_bits(ustream,5); int truncate_flag = (int) read_bits(ustream,1); // truncate_flag - one CB extra int field1packet = 0; // expect Field 1 first if (pattern_flag == 0x00) field1packet=1; // expect Field 1 second dbg_print(CCX_DMT_VERBOSE, "Reading %d%s DVD CC segments\n", capcount, (truncate_flag?"+1":"")); capcount += truncate_flag; // This data comes before the first frame header, so // in order to get the correct timing we need to set the // current time to one frame after the maximum time of the // last GOP. 
Only useful when there are frames before // the GOP. if (ctx->timing->fts_max > 0) ctx->timing->fts_now = ctx->timing->fts_max + (LLONG) (1000.0/current_fps); int rcbcount = 0; for (int i=0; i<capcount; i++) { for (int j=0;j<2;j++) { unsigned char data[3]; data[0]=read_u8(ustream); data[1]=read_u8(ustream); data[2]=read_u8(ustream); // Obey the truncate flag. if ( truncate_flag && i == capcount-1 && j == 1 ) { maybeextracb = 0; break; } /* Field 1 and 2 data can be in either order, with marker bytes of \xff and \xfe Since markers can be repeated, use pattern as well */ if ((data[0]&0xFE) == 0xFE) // Check if valid { if (data[0]==0xff && j==field1packet) data[0]=0x04; // Field 1 else data[0]=0x05; // Field 2 do_cb(ctx, data, sub); rcbcount++; } else { dbg_print(CCX_DMT_VERBOSE, "Illegal caption segment - stop here.\n"); maybeextracb = 0; break; } } } // Theoretically this should not happen, oh well ... // Deal with extra closed captions some DVD have. int ecbcount = 0; while ( maybeextracb && (next_u8(ustream)&0xFE) == 0xFE ) { for (int j=0;j<2;j++) { unsigned char data[3]; data[0]=read_u8(ustream); data[1]=read_u8(ustream); data[2]=read_u8(ustream); /* Field 1 and 2 data can be in either order, with marker bytes of \xff and \xfe Since markers can be repeated, use pattern as well */ if ((data[0]&0xFE) == 0xFE) // Check if valid { if (data[0]==0xff && j==field1packet) data[0]=0x04; // Field 1 else data[0]=0x05; // Field 2 do_cb(ctx, data, sub); ecbcount++; } else { dbg_print(CCX_DMT_VERBOSE, "Illegal (extra) caption segment - stop here.\n"); maybeextracb = 0; break; } } } dbg_print(CCX_DMT_VERBOSE, "Read %d/%d DVD CC blocks\n", rcbcount, ecbcount); } // SCTE 20 user data else if (!ctx->noscte20 && ud_header[0] == 0x03) { if ((ud_header[1]&0x7F) == 0x01) { unsigned char cc_data[3*31+1]; // Maximum cc_count is 31 ctx->stat_scte20ccheaders++; read_bytes(ustream, 2); // "03 01" unsigned cc_count = (unsigned int) read_bits(ustream,5); dbg_print(CCX_DMT_VERBOSE, "Reading 
%d SCTE 20 CC blocks\n", cc_count); unsigned field_number; unsigned cc_data1; unsigned cc_data2; for (unsigned j=0;j<cc_count;j++) { skip_bits(ustream,2); // priority - unused field_number = (unsigned int) read_bits(ustream,2); skip_bits(ustream,5); // line_offset - unused cc_data1 = (unsigned int) read_bits(ustream,8); cc_data2 = (unsigned int) read_bits(ustream,8); read_bits(ustream,1); // TODO: Add syntax check */ if (ustream->bitsleft < 0) fatal(CCX_COMMON_EXIT_BUG_BUG, "In user_data: ustream->bitsleft < 0. Cannot continue."); // Field_number is either // 0 .. forbidden // 1 .. field 1 (odd) // 2 .. field 2 (even) // 3 .. repeated, from repeat_first_field, effectively field 1 if (field_number < 1) { // 0 is invalid cc_data[j*3]=0x00; // Set to invalid cc_data[j*3+1]=0x00; cc_data[j*3+2]=0x00; } else { // Treat field_number 3 as 1 field_number = (field_number - 1) & 0x01; // top_field_first also affects to which field the caption // belongs. if(!ctx->top_field_first) field_number ^= 0x01; cc_data[j*3]=0x04|(field_number); cc_data[j*3+1]=reverse8(cc_data1); cc_data[j*3+2]=reverse8(cc_data2); } } cc_data[cc_count*3]=0xFF; store_hdcc(ctx, cc_data, cc_count, ctx->timing->current_tref, ctx->timing->fts_now, sub); dbg_print(CCX_DMT_VERBOSE, "Reading SCTE 20 CC blocks - done\n"); } // reserved - unspecified } // ReplayTV 4000/5000 caption header - parsing information // derived from CCExtract.bdl else if ( (ud_header[0] == 0xbb //ReplayTV 4000 || ud_header[0] == 0x99) //ReplayTV 5000 && ud_header[1] == 0x02 ) { unsigned char data[3]; if (ud_header[0]==0xbb) ctx->stat_replay4000headers++; else ctx->stat_replay5000headers++; read_bytes(ustream, 2); // "BB 02" or "99 02" data[0]=0x05; // Field 2 data[1]=read_u8(ustream); data[2]=read_u8(ustream); do_cb(ctx, data, sub); read_bytes(ustream, 2); // Skip "CC 02" for R4000 or "AA 02" for R5000 data[0]=0x04; // Field 1 data[1]=read_u8(ustream); data[2]=read_u8(ustream); do_cb(ctx, data, sub); } // HDTV - see A/53 Part 4 (Video) 
else if ( !memcmp(ud_header,"\x47\x41\x39\x34", 4 ) ) { ctx->stat_hdtv++; read_bytes(ustream, 4); // "47 41 39 34" unsigned char type_code = read_u8(ustream); if (type_code==0x03) // CC data. { skip_bits(ustream,1); // reserved unsigned char process_cc_data = (unsigned char) read_bits(ustream,1); skip_bits(ustream,1); // additional_data - unused unsigned char cc_count = (unsigned char) read_bits(ustream,5); read_bytes(ustream, 1); // "FF" if (process_cc_data) { dbg_print(CCX_DMT_VERBOSE, "Reading %d HDTV CC blocks\n", cc_count); int proceed = 1; unsigned char *cc_data = read_bytes(ustream, cc_count*3); if (ustream->bitsleft < 0) fatal(CCX_COMMON_EXIT_BUG_BUG, "In user_data: ustream->bitsleft < 0. Cannot continue.\n"); // Check for proper marker - This read makes sure that // cc_count*3+1 bytes are read and available in cc_data. if (read_u8(ustream)!=0xFF) proceed=0; if (!proceed) { dbg_print(CCX_DMT_VERBOSE, "\rThe following payload is not properly terminated.\n"); dump (CCX_DMT_VERBOSE, cc_data, cc_count*3+1, 0, 0); } dbg_print(CCX_DMT_VERBOSE, "Reading %d HD CC blocks\n", cc_count); // B-frames might be (temporal) before or after the anchor // frame they belong to. Store the buffer until the next anchor // frame occurs. The buffer will be flushed (sorted) in the // picture header (or GOP) section when the next anchor occurs. // Please note we store the current value of the global // fts_now variable (and not get_fts()) as we are going to // re-create the timeline in process_hdcc() (Slightly ugly). store_hdcc(ctx, cc_data, cc_count, ctx->timing->current_tref, ctx->timing->fts_now, sub); dbg_print(CCX_DMT_VERBOSE, "Reading HDTV blocks - done\n"); } } // reserved - additional_cc_data } // DVB closed caption header for Dish Network (Field 1 only) */ else if ( !memcmp(ud_header,"\x05\x02", 2 ) ) { // Like HDTV (above) Dish Network captions can be stored at each // frame, but maximal two caption blocks per frame and only one // field is stored. 
// To process this with the HDTV framework we create a "HDTV" caption // format compatible array. Two times 3 bytes plus one for the 0xFF // marker at the end. Pre-init to field 1 and set the 0xFF marker. static unsigned char dishdata[7] = {0x04, 0, 0, 0x04, 0, 0, 0xFF}; int cc_count; dbg_print(CCX_DMT_VERBOSE, "Reading Dish Network user data\n"); ctx->stat_dishheaders++; read_bytes(ustream, 2); // "05 02" // The next bytes are like this: // header[2] : ID: 0x04 (MPEG?), 0x03 (H264?) // header[3-4]: Two byte counter (counting (sub-)GOPs?) // header[5-6]: Two bytes, maybe checksum? // header[7]: Pattern type // on B-frame: 0x02, 0x04 // on I-/P-frame: 0x05 unsigned char id = read_u8(ustream); unsigned dishcount = read_u16(ustream); unsigned something = read_u16(ustream); unsigned char type = read_u8(ustream); dbg_print(CCX_DMT_PARSE, "DN ID: %02X Count: %5u Unknown: %04X Pattern: %X", id, dishcount, something, type); unsigned char hi; // The following block needs 4 to 6 bytes starting from the // current position unsigned char *dcd = ustream->pos; // dish caption data switch (type) { case 0x02: // Two byte caption - always on B-frame // The following 4 bytes are: // 0 : 0x09 // 1-2: caption block // 3 : REPEAT - 02: two bytes // - 04: four bytes (repeat first two) dbg_print(CCX_DMT_PARSE, "\n02 %02X %02X:%02X - R:%02X :", dcd[0], dcd[1], dcd[2], dcd[3]); cc_count = 1; dishdata[1]=dcd[1]; dishdata[2]=dcd[2]; dbg_print(CCX_DMT_PARSE, "%s", debug_608_to_ASC( dishdata, 0) ); type=dcd[3]; // repeater (0x02 or 0x04) hi = dishdata[1] & 0x7f; // Get only the 7 low bits if (type==0x04 && hi<32) // repeat (only for non-character pairs) { cc_count = 2; dishdata[3]=0x04; // Field 1 dishdata[4]=dishdata[1]; dishdata[5]=dishdata[2]; dbg_print(CCX_DMT_PARSE, "%s:\n", debug_608_to_ASC( dishdata+3, 0) ); } else { dbg_print(CCX_DMT_PARSE, ":\n"); } dishdata[cc_count*3] = 0xFF; // Set end marker store_hdcc(ctx, dishdata, cc_count, ctx->timing->current_tref, ctx->timing->fts_now, sub); 
// Ignore 3 (0x0A, followed by two unknown) bytes. break; case 0x04: // Four byte caption - always on B-frame // The following 5 bytes are: // 0 : 0x09 // 1-2: caption block // 3-4: caption block dbg_print(CCX_DMT_PARSE, "\n04 %02X %02X:%02X:%02X:%02X :", dcd[0], dcd[1], dcd[2], dcd[3], dcd[4]); cc_count = 2; dishdata[1]=dcd[1]; dishdata[2]=dcd[2]; dishdata[3]=0x04; // Field 1 dishdata[4]=dcd[3]; dishdata[5]=dcd[4]; dishdata[6] = 0xFF; // Set end marker dbg_print(CCX_DMT_PARSE, "%s", debug_608_to_ASC( dishdata, 0) ); dbg_print(CCX_DMT_PARSE, "%s:\n", debug_608_to_ASC( dishdata+3, 0) ); store_hdcc(ctx, dishdata, cc_count, ctx->timing->current_tref, ctx->timing->fts_now, sub); // Ignore 4 (0x020A, followed by two unknown) bytes. break; case 0x05: // Buffered caption - always on I-/P-frame // The following six bytes are: // 0 : 0x04 // - the following are from previous 0x05 caption header - // 1 : prev dcd[2] // 2-3: prev dcd[3-4] // 4-5: prev dcd[5-6] dbg_print(CCX_DMT_PARSE, " - %02X pch: %02X %5u %02X:%02X\n", dcd[0], dcd[1], (unsigned)dcd[2]*256+dcd[3], dcd[4], dcd[5]); dcd+=6; // Skip these 6 bytes // Now one of the "regular" 0x02 or 0x04 captions follows dbg_print(CCX_DMT_PARSE, "%02X %02X %02X:%02X", dcd[0], dcd[1], dcd[2], dcd[3]); type=dcd[0]; // Number of caption bytes (0x02 or 0x04) cc_count = 1; dishdata[1]=dcd[2]; dishdata[2]=dcd[3]; dcd+=4; // Skip the first 4 bytes. if (type==0x02) { type=dcd[0]; // repeater (0x02 or 0x04) dcd++; // Skip the repeater byte. 
dbg_print(CCX_DMT_PARSE, " - R:%02X :%s", type, debug_608_to_ASC( dishdata, 0) ); hi = dishdata[1] & 0x7f; // Get only the 7 low bits if (type==0x04 && hi<32) { cc_count = 2; dishdata[3]=0x04; // Field 1 dishdata[4]=dishdata[1]; dishdata[5]=dishdata[2]; dbg_print(CCX_DMT_PARSE, "%s:\n", debug_608_to_ASC( dishdata+3, 0) ); } else { dbg_print(CCX_DMT_PARSE, ":\n"); } dishdata[cc_count*3] = 0xFF; // Set end marker } else { dbg_print(CCX_DMT_PARSE, ":%02X:%02X ", dcd[0], dcd[1]); cc_count = 2; dishdata[3]=0x04; // Field 1 dishdata[4]=dcd[0]; dishdata[5]=dcd[1]; dishdata[6] = 0xFF; // Set end marker dbg_print(CCX_DMT_PARSE, ":%s", debug_608_to_ASC( dishdata, 0) ); dbg_print(CCX_DMT_PARSE, "%s:\n", debug_608_to_ASC( dishdata+3, 0) ); } store_hdcc(ctx, dishdata, cc_count, ctx->timing->current_tref, ctx->timing->fts_now, sub); // Ignore 3 (0x0A, followed by 2 unknown) bytes. break; default: // printf ("Unknown?\n"); break; } // switch dbg_print(CCX_DMT_VERBOSE, "Reading Dish Network user data - done\n"); } // CEA 608 / aka "Divicom standard", see: // http://www.pixeltools.com/tech_tip_closed_captioning.html else if ( !memcmp(ud_header,"\x02\x09", 2 ) ) { // Either a documentation or more examples are needed. ctx->stat_divicom++; unsigned char data[3]; read_bytes(ustream, 2); // "02 09" read_bytes(ustream, 2); // "80 80" ??? read_bytes(ustream, 2); // "02 0A" ??? data[0]=0x04; // Field 1 data[1]=read_u8(ustream); data[2]=read_u8(ustream); do_cb(ctx, data, sub); // This is probably incomplete! 
} // GXF vbi OEM code else if ( !memcmp(ud_header,"\x73\x52\x21\x06", 4 ) ) { int udatalen = ustream->end - ustream->pos; uint16_t line_nb; uint8_t line_type; uint8_t field = 1; read_bytes(ustream, 4); //skip header code read_bytes(ustream, 2); //skip data length line_nb = read_bits(ustream, 16); line_type = read_u8(ustream); field = (line_type & 0x03); if(field == 0) mprint("MPEG:VBI: Invalid field\n"); line_type = line_type >> 2; if(line_type != 1) mprint("MPEG:VBI: only support Luma line\n"); if (udatalen < 720) mprint("MPEG:VBI: Minimum 720 bytes in luma line required\n"); decode_vbi(ctx, field, ustream->pos, 720, sub); dbg_print(CCX_DMT_VERBOSE, "GXF (vbi line %d) user data:\n", line_nb); }
/* DeckLink SDK input callback: invoked by the driver once per capture interval
 * with the video frame and/or audio packet captured in that interval.
 *
 * Video path: uses the SDI frame time as the master clock, unpacks the
 * VANC/VBI ancillary lines (plus, for SD formats, a few active-picture VBI
 * lines), parses the non-display services, then decodes the raw picture and
 * queues it on the filter chain.
 * Audio path: converts the card's packed PCM to planar S32 and queues it.
 *
 * Always returns S_OK: the DeckLink driver does not act on failure codes, so
 * errors are logged via syslog and cleaned up locally instead of propagated.
 * (The original code returned -1 from two audio error paths, which both
 * produced a bogus HRESULT and skipped the shared cleanup; fixed below.)
 */
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived( IDeckLinkVideoInputFrame *videoframe, IDeckLinkAudioInputPacket *audioframe )
{
    decklink_ctx_t *decklink_ctx = &decklink_opts_->decklink_ctx;
    obe_raw_frame_t *raw_frame = NULL;
    AVPacket pkt;
    AVFrame *frame = NULL;
    void *frame_bytes, *anc_line;
    obe_t *h = decklink_ctx->h;
    int finished = 0, ret, num_anc_lines = 0, anc_line_stride, first_line = 0, last_line = 0, line, num_vbi_lines, vii_line;
    uint32_t *frame_ptr;
    uint16_t *anc_buf, *anc_buf_pos;
    uint8_t *vbi_buf;
    int anc_lines[DECKLINK_VANC_LINES];
    IDeckLinkVideoFrameAncillary *ancillary;
    BMDTimeValue stream_time, frame_duration;

    /* In probe mode one successfully handled frame is enough. */
    if( decklink_opts_->probe_success )
        return S_OK;

    av_init_packet( &pkt );

    if( videoframe )
    {
        if( videoframe->GetFlags() & bmdFrameHasNoInputSource )
        {
            syslog( LOG_ERR, "Decklink card index %i: No input signal detected", decklink_opts_->card_idx );
            return S_OK;
        }
        else if( decklink_opts_->probe )
            decklink_opts_->probe_success = 1;

        /* use SDI ticks as clock source */
        videoframe->GetStreamTime( &stream_time, &frame_duration, OBE_CLOCK );
        obe_clock_tick( h, (int64_t)stream_time );

        /* Watchdog: if no frame arrived for SDI_MAX_DELAY us, tell the
         * encoder and mux threads to start dropping. */
        if( decklink_ctx->last_frame_time == -1 )
            decklink_ctx->last_frame_time = obe_mdate();
        else
        {
            int64_t cur_frame_time = obe_mdate();
            if( cur_frame_time - decklink_ctx->last_frame_time >= SDI_MAX_DELAY )
            {
                syslog( LOG_WARNING, "Decklink card index %i: No frame received for %"PRIi64" ms", decklink_opts_->card_idx,
                        (cur_frame_time - decklink_ctx->last_frame_time) / 1000 );
                pthread_mutex_lock( &h->drop_mutex );
                h->encoder_drop = h->mux_drop = 1;
                pthread_mutex_unlock( &h->drop_mutex );
            }
            decklink_ctx->last_frame_time = cur_frame_time;
        }

        const int width = videoframe->GetWidth();
        const int height = videoframe->GetHeight();
        const int stride = videoframe->GetRowBytes();

        videoframe->GetBytes( &frame_bytes );

        /* TODO: support format switching (rare in SDI) */
        int j;
        for( j = 0; first_active_line[j].format != -1; j++ )
        {
            if( decklink_opts_->video_format == first_active_line[j].format )
                break;
        }

        videoframe->GetAncillaryData( &ancillary );

        /* NTSC starts on line 4 */
        line = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC ? 4 : 1;
        anc_line_stride = FFALIGN( (width * 2 * sizeof(uint16_t)), 16 );

        /* Overallocate slightly for VANC buffer
         * Some VBI services stray into the active picture so allocate some extra space */
        anc_buf = anc_buf_pos = (uint16_t*)av_malloc( DECKLINK_VANC_LINES * anc_line_stride );
        if( !anc_buf )
        {
            syslog( LOG_ERR, "Malloc failed\n" );
            goto end;
        }

        /* Unpack every vertical-blanking line up to the first active line. */
        while( 1 )
        {
            /* Some cards have restrictions on what lines can be accessed so try them all
             * Some buggy decklink cards will randomly refuse access to a particular line so
             * work around this issue by blanking the line */
            if( ancillary->GetBufferForVerticalBlankingLine( line, &anc_line ) == S_OK )
                decklink_ctx->unpack_line( (uint32_t*)anc_line, anc_buf_pos, width );
            else
                decklink_ctx->blank_line( anc_buf_pos, width );

            anc_buf_pos += anc_line_stride / 2;
            anc_lines[num_anc_lines++] = line;
            if( !first_line )
                first_line = line;
            last_line = line;

            line = sdi_next_line( decklink_opts_->video_format, line );
            if( line == first_active_line[j].line )
                break;
        }

        ancillary->Release();

        if( !decklink_opts_->probe )
        {
            raw_frame = new_raw_frame();
            if( !raw_frame )
            {
                syslog( LOG_ERR, "Malloc failed\n" );
                goto end;
            }
        }

        /* Parse the VANC services line by line.
         * NOTE(review): raw_frame is NULL here in probe mode - presumably
         * parse_vanc_line tolerates that; confirm against its definition. */
        anc_buf_pos = anc_buf;
        for( int i = 0; i < num_anc_lines; i++ )
        {
            parse_vanc_line( h, &decklink_ctx->non_display_parser, raw_frame, anc_buf_pos, width, anc_lines[i] );
            anc_buf_pos += anc_line_stride / 2;
        }

        if( IS_SD( decklink_opts_->video_format ) && first_line != last_line )
        {
            /* Add some VBI lines to the ancillary buffer */
            frame_ptr = (uint32_t*)frame_bytes;

            /* NTSC starts from line 283 so add an extra line */
            num_vbi_lines = NUM_ACTIVE_VBI_LINES + ( decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC );
            for( int i = 0; i < num_vbi_lines; i++ )
            {
                decklink_ctx->unpack_line( frame_ptr, anc_buf_pos, width );
                anc_buf_pos += anc_line_stride / 2;
                frame_ptr += stride / 4;
                last_line = sdi_next_line( decklink_opts_->video_format, last_line );
            }
            num_anc_lines += num_vbi_lines;

            vbi_buf = (uint8_t*)av_malloc( width * 2 * num_anc_lines );
            if( !vbi_buf )
            {
                syslog( LOG_ERR, "Malloc failed\n" );
                av_free( anc_buf ); /* FIX: anc_buf was leaked on this path */
                goto end;
            }

            /* Scale the lines from 10-bit to 8-bit */
            decklink_ctx->downscale_line( anc_buf, vbi_buf, num_anc_lines );
            anc_buf_pos = anc_buf;

            /* Handle Video Index information: advance to the VII line in the buffer */
            int tmp_line = first_line;
            vii_line = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC ? NTSC_VIDEO_INDEX_LINE : PAL_VIDEO_INDEX_LINE;
            while( tmp_line < vii_line )
            {
                anc_buf_pos += anc_line_stride / 2;
                tmp_line++;
            }

            if( decode_video_index_information( h, &decklink_ctx->non_display_parser, anc_buf_pos, raw_frame, vii_line ) < 0 )
            {
                /* FIX: release the scratch buffers before bailing out (both were leaked) */
                av_free( vbi_buf );
                av_free( anc_buf );
                goto fail;
            }

            /* Lazily initialise the raw VBI decoder with this source's line layout. */
            if( !decklink_ctx->has_setup_vbi )
            {
                vbi_raw_decoder_init( &decklink_ctx->non_display_parser.vbi_decoder );

                decklink_ctx->non_display_parser.ntsc = decklink_opts_->video_format == INPUT_VIDEO_FORMAT_NTSC;
                decklink_ctx->non_display_parser.vbi_decoder.start[0] = first_line;
                decklink_ctx->non_display_parser.vbi_decoder.start[1] = sdi_next_line( decklink_opts_->video_format, first_line );
                decklink_ctx->non_display_parser.vbi_decoder.count[0] = last_line - decklink_ctx->non_display_parser.vbi_decoder.start[1] + 1;
                decklink_ctx->non_display_parser.vbi_decoder.count[1] = decklink_ctx->non_display_parser.vbi_decoder.count[0];

                if( setup_vbi_parser( &decklink_ctx->non_display_parser ) < 0 )
                {
                    /* FIX: release the scratch buffers before bailing out (both were leaked) */
                    av_free( vbi_buf );
                    av_free( anc_buf );
                    goto fail;
                }

                decklink_ctx->has_setup_vbi = 1;
            }

            if( decode_vbi( h, &decklink_ctx->non_display_parser, vbi_buf, raw_frame ) < 0 )
            {
                /* FIX: release the scratch buffers before bailing out (both were leaked) */
                av_free( vbi_buf );
                av_free( anc_buf );
                goto fail;
            }

            av_free( vbi_buf );
        }

        av_free( anc_buf );

        if( !decklink_opts_->probe )
        {
            frame = avcodec_alloc_frame();
            if( !frame )
            {
                syslog( LOG_ERR, "[decklink]: Could not allocate video frame\n" );
                goto end;
            }
            decklink_ctx->codec->width = width;
            decklink_ctx->codec->height = height;

            /* Decode the raw (v210/uyvy) picture in place; pkt.data points
             * into the driver-owned frame buffer and is not freed by us. */
            pkt.data = (uint8_t*)frame_bytes;
            pkt.size = stride * height;

            ret = avcodec_decode_video2( decklink_ctx->codec, frame, &finished, &pkt );
            if( ret < 0 || !finished )
            {
                syslog( LOG_ERR, "[decklink]: Could not decode video frame\n" );
                goto end;
            }

            raw_frame->release_data = obe_release_video_data;
            raw_frame->release_frame = obe_release_frame;

            /* Hand the decoded planes over to raw_frame; the AVFrame wrapper
             * itself is no longer needed. */
            memcpy( raw_frame->alloc_img.stride, frame->linesize, sizeof(raw_frame->alloc_img.stride) );
            memcpy( raw_frame->alloc_img.plane, frame->data, sizeof(raw_frame->alloc_img.plane) );
            avcodec_free_frame( &frame );
            raw_frame->alloc_img.csp = (int)decklink_ctx->codec->pix_fmt;
            raw_frame->alloc_img.planes = av_pix_fmt_descriptors[raw_frame->alloc_img.csp].nb_components;
            raw_frame->alloc_img.width = width;
            raw_frame->alloc_img.height = height;
            raw_frame->alloc_img.format = decklink_opts_->video_format;
            raw_frame->timebase_num = decklink_opts_->timebase_num;
            raw_frame->timebase_den = decklink_opts_->timebase_den;

            memcpy( &raw_frame->img, &raw_frame->alloc_img, sizeof(raw_frame->alloc_img) );
            if( IS_SD( decklink_opts_->video_format ) )
            {
                /* Crop 486-line NTSC captures to the canonical 480 lines. */
                if( raw_frame->alloc_img.height == 486 )
                    raw_frame->img.height = 480;
                raw_frame->img.first_line = first_active_line[j].line;
            }

            /* If AFD is present and the stream is SD this will be changed in the video filter */
            raw_frame->sar_width = raw_frame->sar_height = 1;
            raw_frame->pts = stream_time;

            for( int i = 0; i < decklink_ctx->device->num_input_streams; i++ )
            {
                if( decklink_ctx->device->streams[i]->stream_format == VIDEO_UNCOMPRESSED )
                    raw_frame->input_stream_id = decklink_ctx->device->streams[i]->input_stream_id;
            }

            if( add_to_filter_queue( h, raw_frame ) < 0 )
                goto fail;

            if( send_vbi_and_ttx( h, &decklink_ctx->non_display_parser, raw_frame->pts ) < 0 )
                goto fail;

            decklink_ctx->non_display_parser.num_vbi = 0;
            decklink_ctx->non_display_parser.num_anc_vbi = 0;
        }
    }

    /* TODO: probe SMPTE 337M audio */
    if( audioframe && !decklink_opts_->probe )
    {
        audioframe->GetBytes( &frame_bytes );

        raw_frame = new_raw_frame();
        if( !raw_frame )
        {
            syslog( LOG_ERR, "Malloc failed\n" );
            goto end;
        }

        raw_frame->audio_frame.num_samples = audioframe->GetSampleFrameCount();
        raw_frame->audio_frame.num_channels = decklink_opts_->num_channels;
        raw_frame->audio_frame.sample_fmt = AV_SAMPLE_FMT_S32P;

        if( av_samples_alloc( raw_frame->audio_frame.audio_data, &raw_frame->audio_frame.linesize, decklink_opts_->num_channels,
                              raw_frame->audio_frame.num_samples, (AVSampleFormat)raw_frame->audio_frame.sample_fmt, 0 ) < 0 )
        {
            syslog( LOG_ERR, "Malloc failed\n" );
            /* FIX: was "return -1" - not a valid HRESULT here and it skipped
             * the shared cleanup. NOTE(review): raw_frame itself still leaks
             * on this rare path, as it did before; release hooks are not set
             * yet so the fail: path cannot be used safely. */
            goto end;
        }

        if( avresample_convert( decklink_ctx->avr, raw_frame->audio_frame.audio_data, raw_frame->audio_frame.linesize,
                                raw_frame->audio_frame.num_samples, (uint8_t**)&frame_bytes, 0, raw_frame->audio_frame.num_samples ) < 0 )
        {
            syslog( LOG_ERR, "[decklink] Sample format conversion failed\n" );
            /* FIX: was "return -1" - same problem as above. */
            goto end;
        }

        BMDTimeValue packet_time;
        audioframe->GetPacketTime( &packet_time, OBE_CLOCK );
        raw_frame->pts = packet_time;

        raw_frame->release_data = obe_release_audio_data;
        raw_frame->release_frame = obe_release_frame;

        for( int i = 0; i < decklink_ctx->device->num_input_streams; i++ )
        {
            if( decklink_ctx->device->streams[i]->stream_format == AUDIO_PCM )
                raw_frame->input_stream_id = decklink_ctx->device->streams[i]->input_stream_id;
        }

        if( add_to_filter_queue( decklink_ctx->h, raw_frame ) < 0 )
            goto fail;
    }

end:
    /* Shared success/soft-failure cleanup. */
    if( frame )
        avcodec_free_frame( &frame );
    av_free_packet( &pkt );

    return S_OK;

fail:
    /* Hard failure after raw_frame's release hooks were installed:
     * let the frame release its own payload and itself. */
    if( raw_frame )
    {
        raw_frame->release_data( raw_frame );
        raw_frame->release_frame( raw_frame );
    }

    return S_OK;
}