Example no. 1
static int video_callback(void *priv, uint8_t *frame,
                          int width, int height, int stride,
                          int64_t timestamp, int64_t duration,
                          int64_t flags)
{
//    CaptureContext *ctx = priv;
    AVPacket pkt;
    AVCodecContext *c;
    av_init_packet(&pkt);
    c = video_st->codec;
    if (verbose && frame_count++ % 25 == 0) {
        uint64_t qsize = avpacket_queue_size(&queue);
        fprintf(stderr,
                "Frame received (#%lu) - Valid (%dB) - QSize %f\n",
                frame_count,
                stride * height,
                (double)qsize / 1024 / 1024);
    }
    avpicture_fill((AVPicture *)picture, (uint8_t *)frame,
                   pix_fmt,
                   width, height);

    pkt.pts      = pkt.dts = timestamp / video_st->time_base.num;
    pkt.duration = duration / video_st->time_base.num;
    // TODO: verify this still applies
    pkt.flags       |= AV_PKT_FLAG_KEY;
    pkt.stream_index = video_st->index;
    pkt.data         = frame;
    pkt.size         = stride * height;
    c->frame_number++;
    avpacket_queue_put(&queue, &pkt);

    return 0;
}
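The division by time_base.num in these callbacks assumes the incoming timestamp is counted in ticks of 1/time_base.den (the later DeckLink examples arrange exactly that by passing time_base.den to GetStreamTime()/GetPacketTime()). The same conversion can be written with av_rescale_q(); a minimal sketch, where ticks_to_pts() is a hypothetical helper name, not something from the code above:

#include <stdint.h>
#include <libavutil/rational.h>
#include <libavutil/mathematics.h>

/* Rescale a value counted in ticks of 1 / time_base.den into the stream
 * time base; equivalent to the "timestamp / time_base.num" above. */
static int64_t ticks_to_pts(int64_t ticks, AVRational time_base)
{
    AVRational tick = { 1, time_base.den };
    return av_rescale_q(ticks, tick, time_base);
}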
Example no. 2
void write_audio_packet(IDeckLinkAudioInputPacket *audioFrame)
{
    AVCodecContext *c;
    AVPacket pkt;
    BMDTimeValue audio_pts;
    void *audioFrameBytes;

    av_init_packet(&pkt);

    c = audio_st->codec;
    //hack among hacks
    pkt.size = audioFrame->GetSampleFrameCount() *
               g_audioChannels * (g_audioSampleDepth / 8);
    audioFrame->GetBytes(&audioFrameBytes);
    audioFrame->GetPacketTime(&audio_pts, audio_st->time_base.den);
    pkt.pts = audio_pts / audio_st->time_base.num;

    if (initial_audio_pts == AV_NOPTS_VALUE) {
        initial_audio_pts = pkt.pts;
    }

    pkt.pts -= initial_audio_pts;
    pkt.dts = pkt.pts;

    pkt.flags       |= AV_PKT_FLAG_KEY;
    pkt.stream_index = audio_st->index;
    pkt.data         = (uint8_t *)audioFrameBytes;
    c->frame_number++;

    avpacket_queue_put(&queue, &pkt);
}
Example no. 3
void write_data_packet(char *data, int size, int64_t pts)
{
    AVPacket pkt;
    av_init_packet(&pkt);

    pkt.flags        |= AV_PKT_FLAG_KEY;
    pkt.stream_index  = data_st->index;
    pkt.data          = (uint8_t*)data;
    pkt.size          = size;
    pkt.dts = pkt.pts = pts;

    avpacket_queue_put(&queue, &pkt);
}
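Every example pushes the finished packet into a shared queue via avpacket_queue_put() and reports the backlog with avpacket_queue_size(), but neither helper is shown. A minimal sketch of what such a queue could look like, assuming a mutex-protected FIFO that tracks its total byte size (the actual implementations in these projects may differ):

#include <pthread.h>
#include <libavcodec/avcodec.h>   /* AVPacket */
#include <libavutil/mem.h>        /* av_malloc() */

typedef struct PacketNode {
    AVPacket pkt;
    struct PacketNode *next;
} PacketNode;

typedef struct AVPacketQueue {
    PacketNode *first, *last;
    unsigned long long size;      /* accumulated pkt.size in bytes */
    pthread_mutex_t mutex;
    pthread_cond_t cond;
} AVPacketQueue;

static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
{
    PacketNode *node = av_malloc(sizeof(*node));
    if (!node)
        return -1;
    /* The callbacks queue pkt.data pointing straight into the capture
     * driver's buffer; a real queue must copy or reference the payload
     * (e.g. av_packet_ref()) before that buffer is recycled. */
    node->pkt  = *pkt;
    node->next = NULL;
    pthread_mutex_lock(&q->mutex);
    if (q->last)
        q->last->next = node;
    else
        q->first = node;
    q->last  = node;
    q->size += pkt->size;
    pthread_cond_signal(&q->cond);
    pthread_mutex_unlock(&q->mutex);
    return 0;
}

static unsigned long long avpacket_queue_size(AVPacketQueue *q)
{
    unsigned long long size;
    pthread_mutex_lock(&q->mutex);
    size = q->size;
    pthread_mutex_unlock(&q->mutex);
    return size;
}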
Example no. 4
static int audio_callback(void *priv, uint8_t *frame,
                          int nb_samples,
                          int64_t timestamp,
                          int64_t flags)
{
    DecklinkConf *ctx = priv;
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = audio_st->codec;
    //hack among hacks
    pkt.size = nb_samples * ctx->audio_channels * (ctx->audio_sample_depth / 8);
    pkt.dts = pkt.pts = timestamp / audio_st->time_base.num;
    pkt.flags       |= AV_PKT_FLAG_KEY;
    pkt.stream_index = audio_st->index;
    pkt.data         = frame;
    c->frame_number++;
    avpacket_queue_put(&queue, &pkt);

    return 0;
}
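The size computation behind the "hack among hacks" comment is plain PCM bookkeeping: sample frames x channels x bytes per sample. A small illustrative calculation (the figures below are examples, not values taken from the code above):

/* PCM packet size: sample frames x channels x bytes per sample.
 * 1920 sample frames is what a 48 kHz feed delivers over one 1/25 s
 * video frame. */
int nb_samples   = 1920;
int channels     = 2;
int sample_depth = 16;   /* bits per sample */
int pkt_size     = nb_samples * channels * (sample_depth / 8);   /* 7680 bytes */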
Example no. 5
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(
    IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;
    time_t cur_time;

    frameCount++;

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt;
        AVCodecContext *c;
        av_init_packet(&pkt);
        c = video_st->codec;
        if (g_verbose && frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&queue);
            fprintf(stderr,
                    "Frame received (#%lu) - Valid (%liB) - QSize %f\n",
                    frameCount,
                    videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                    (double)qsize / 1024 / 1024);
        }

        videoFrame->GetBytes(&frameBytes);
        videoFrame->GetStreamTime(&frameTime, &frameDuration,
                                  video_st->time_base.den);

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            unsigned bars[8] = {
                0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035,
                0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 };
            int width  = videoFrame->GetWidth();
            int height = videoFrame->GetHeight();
            unsigned *p = (unsigned *)frameBytes;

            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x += 2)
                    *p++ = bars[(x * 8) / width];
            }

            if (!no_video) {
                time(&cur_time);
                fprintf(stderr,"%s "
                        "Frame received (#%lu) - No input signal detected "
                        "- Frames dropped %u - Total dropped %u\n",
                        ctime(&cur_time),
                        frameCount, ++dropped, ++totaldropped);
            }
            no_video = 1;
        } else {
            if (no_video) {
                time(&cur_time);
                fprintf(stderr, "%s "
                        "Frame received (#%lu) - Input returned "
                        "- Frames dropped %u - Total dropped %u\n",
                        ctime(&cur_time),
                        frameCount, ++dropped, ++totaldropped);
            }
            no_video = 0;
        }

        pkt.pts = frameTime / video_st->time_base.num;

        if (initial_video_pts == AV_NOPTS_VALUE) {
            initial_video_pts = pkt.pts;
        }

        pkt.pts -= initial_video_pts;
        pkt.dts = pkt.pts;

        pkt.duration = frameDuration;
        // TODO: verify this still applies
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = video_st->index;
        pkt.data         = (uint8_t *)frameBytes;
        pkt.size         = videoFrame->GetRowBytes() *
                           videoFrame->GetHeight();
        //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);
        c->frame_number++;
        avpacket_queue_put(&queue, &pkt);
    }

    // Handle Audio Frame
    if (audioFrame) {
        AVCodecContext *c;
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        c = audio_st->codec;
        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() *
                   g_audioChannels * (g_audioSampleDepth / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, audio_st->time_base.den);
        pkt.pts = audio_pts / audio_st->time_base.num;

        if (initial_audio_pts == AV_NOPTS_VALUE) {
            initial_audio_pts = pkt.pts;
        }

        pkt.pts -= initial_audio_pts;
        pkt.dts = pkt.pts;

        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = audio_st->index;
        pkt.data         = (uint8_t *)audioFrameBytes;
        //pkt.size= avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);
        c->frame_number++;
        //write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
/*            if (av_interleaved_write_frame(oc, &pkt) != 0) {
 *          fprintf(stderr, "Error while writing audio frame\n");
 *          exit(1);
 *      } */
        avpacket_queue_put(&queue, &pkt);
    }

    if (serial_fd > 0) {
        AVPacket pkt;
        char line[8] = {0};
        int count = read(serial_fd, line, 7);
        if (count > 0)
            fprintf(stderr, "read %d bytes: %s  \n", count, line);
        else
            line[0] = ' ';
        av_init_packet(&pkt);
        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index= data_st->index;
        pkt.data = (uint8_t*)line;
        pkt.size = 7;
        pkt.pts = frameTime / video_st->time_base.num;
        pkt.pts -= initial_video_pts;
        pkt.dts = pkt.pts;
        avpacket_queue_put(&queue, &pkt);
    }

    return S_OK;
}
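The commented-out av_interleaved_write_frame() calls show where the packets eventually go: a separate muxer thread drains the queue so the capture callback never blocks on disk or network I/O. A hedged sketch of such a consumer loop, assuming a blocking avpacket_queue_get() counterpart to the queue helpers sketched earlier and an already opened AVFormatContext *oc (both assumptions, not code from the projects above):

#include <stdio.h>
#include <libavformat/avformat.h>

/* Hypothetical consumer: pull packets off the queue and hand them to the
 * muxer. avpacket_queue_get() is assumed to block until a packet is
 * available and to return <= 0 on shutdown. */
static void *mux_thread(void *arg)
{
    AVFormatContext *oc = arg;
    AVPacket pkt;

    while (avpacket_queue_get(&queue, &pkt, 1) > 0) {
        if (av_interleaved_write_frame(oc, &pkt) != 0) {
            fprintf(stderr, "Error while writing frame\n");
            break;
        }
    }
    return NULL;
}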
Example no. 6
HRESULT decklink_input_callback::VideoInputFrameArrived(
    IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;

    ctx->frameCount++;

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt;
        AVCodecContext *c;
        av_init_packet(&pkt);
        c = ctx->video_st->codec;
        if (ctx->frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&ctx->queue);
            av_log(avctx, AV_LOG_DEBUG,
                    "Frame received (#%lu) - Valid (%liB) - QSize %fMB\n",
                    ctx->frameCount,
                    videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                    (double)qsize / 1024 / 1024);
        }

        videoFrame->GetBytes(&frameBytes);
        videoFrame->GetStreamTime(&frameTime, &frameDuration,
                                  ctx->video_st->time_base.den);

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            unsigned bars[8] = {
                0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035,
                0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 };
            int width  = videoFrame->GetWidth();
            int height = videoFrame->GetHeight();
            unsigned *p = (unsigned *)frameBytes;

            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x += 2)
                    *p++ = bars[(x * 8) / width];
            }

            if (!no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - No input signal detected "
                        "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 1;
        } else {
            if (no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - Input returned "
                        "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 0;
        }

        pkt.pts = frameTime / ctx->video_st->time_base.num;

        if (initial_video_pts == AV_NOPTS_VALUE) {
            initial_video_pts = pkt.pts;
        }

        pkt.pts -= initial_video_pts;
        pkt.dts = pkt.pts;

        pkt.duration = frameDuration;
        // TODO: verify this still applies
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->video_st->index;
        pkt.data         = (uint8_t *)frameBytes;
        pkt.size         = videoFrame->GetRowBytes() *
                           videoFrame->GetHeight();
        //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);
        c->frame_number++;
        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    // Handle Audio Frame
    if (audioFrame) {
        AVCodecContext *c;
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        c = ctx->audio_st->codec;
        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codec->channels * (16 / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, ctx->audio_st->time_base.den);
        pkt.pts = audio_pts / ctx->audio_st->time_base.num;

        if (initial_audio_pts == AV_NOPTS_VALUE) {
            initial_audio_pts = pkt.pts;
        }

        pkt.pts -= initial_audio_pts;
        pkt.dts = pkt.pts;

        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->audio_st->index;
        pkt.data         = (uint8_t *)audioFrameBytes;

        c->frame_number++;
        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    return S_OK;
}
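This example (like Example 7 below) reads the channel count through the deprecated AVStream::codec context. Example 9 switches to AVCodecParameters; the equivalent line with the newer accessor is roughly:

/* Same audio size computation via AVCodecParameters instead of the
 * deprecated AVStream::codec (compare Example 9). */
pkt.size = audioFrame->GetSampleFrameCount() *
           ctx->audio_st->codecpar->channels * (16 / 8);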
Example no. 7
HRESULT decklink_input_callback::VideoInputFrameArrived(
    IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;

    // set max thread priority once
    if (ctx->frameCount == 0) {
        // thanks to MLT for code snippet
        int r;
        pthread_t thread;
        pthread_attr_t tattr;
        struct sched_param param;

        pthread_attr_init(&tattr);
        pthread_attr_setschedpolicy(&tattr, SCHED_FIFO);
        param.sched_priority = sched_get_priority_max(SCHED_FIFO) - 1;
        pthread_attr_setschedparam(&tattr, &param);

        thread = pthread_self();
        r = pthread_setschedparam(thread, SCHED_FIFO, &param);
        if (r) {
            av_log(avctx, AV_LOG_WARNING, "pthread_setschedparam returned %i\n", r);
        } else {
            av_log(avctx, AV_LOG_INFO, "param.sched_priority=%i\n", param.sched_priority);
        }
    }

    ctx->frameCount++;

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt;
        AVCodecContext *c;
        av_init_packet(&pkt);
        c = ctx->video_st->codec;
        if (ctx->frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&ctx->queue);
            av_log(avctx, AV_LOG_DEBUG,
                    "Frame received (#%lu) - Valid (%liB) - QSize %fMB\n",
                    ctx->frameCount,
                    videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                    (double)qsize / 1024 / 1024);
        }

        videoFrame->GetBytes(&frameBytes);
        videoFrame->GetStreamTime(&frameTime, &frameDuration,
                                  ctx->video_st->time_base.den);

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            if (no_video == 0) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - No input signal detected\n", ctx->frameCount);
            }
            no_video++;
            //
            if (!cctx->nosignal_nofreeze) {
                if (no_video < 50 && lastFrameBytes != NULL) {
                    memcpy(frameBytes, lastFrameBytes,
                           videoFrame->GetRowBytes() * videoFrame->GetHeight());
                } else {
                    if (nosignalFrameBytes == NULL) {
                        // init nosignal picture
                        nosignalFrameBytes = av_mallocz(videoFrame->GetRowBytes() *
                                                        videoFrame->GetHeight());
                        if (videoFrame->GetPixelFormat() == bmdFormat8BitYUV) {
                            int width  = videoFrame->GetWidth();
                            int height = videoFrame->GetHeight();
                            unsigned *p = (unsigned *)nosignalFrameBytes;
                            for (int y = 0; y < height; y++) {
                                for (int x = 0; x < width; x += 2)
                                    *p++ = 0x10801080;
                            }
                        }
                    }
                    memcpy(frameBytes, nosignalFrameBytes,
                           videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        } else {
            if (no_video > 0) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - Input returned "
                        "- Frames dropped %u\n", ctx->frameCount, no_video);
            }
            no_video = 0;
            //
            if (!cctx->nosignal_nofreeze) {
                if (lastFrameBytes != NULL) {
                    av_free(lastFrameBytes);
                    lastFrameBytes = NULL;
                }
                lastFrameBytes = av_memdup(frameBytes,
                                           videoFrame->GetRowBytes() *
                                           videoFrame->GetHeight());
            }
        }

        pkt.pts = frameTime / ctx->video_st->time_base.num;

        if (initial_video_pts == AV_NOPTS_VALUE) {
            initial_video_pts = pkt.pts;
        }

        pkt.pts -= initial_video_pts;
        pkt.dts = pkt.pts;

        pkt.duration = frameDuration;
        // TODO: verify this still applies
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->video_st->index;
        pkt.data         = (uint8_t *)frameBytes;
        pkt.size         = videoFrame->GetRowBytes() *
                           videoFrame->GetHeight();
        //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);
        c->frame_number++;
        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }

        // check pts drift
        int64_t pts = av_rescale_q(pkt.pts, ctx->video_st->time_base,
                                   ctx->video_st->codec->time_base);
        if (pts != (ctx->frameCount - 1L) + videoPtsDrift) {
            if (no_video == 0) {
                av_log(avctx, AV_LOG_WARNING,
                       "Video pts mismatch - current -> %ld projected -> %ld drift -> %ld\n",
                       pts, (ctx->frameCount - 1L), videoPtsDrift);
            }
            videoPtsDrift = pts - (ctx->frameCount - 1L);
        }
    }

    // Handle Audio Frame
    if (audioFrame) {
        AVCodecContext *c;
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        c = ctx->audio_st->codec;
        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codec->channels * (16 / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, ctx->audio_st->time_base.den);
        pkt.pts = audio_pts / ctx->audio_st->time_base.num;

        if (initial_audio_pts == AV_NOPTS_VALUE) {
            initial_audio_pts = pkt.pts;
        }

        pkt.pts -= initial_audio_pts;
        pkt.dts = pkt.pts;

        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->audio_st->index;
        pkt.data         = (uint8_t *)audioFrameBytes;

        c->frame_number++;
        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }

        // check pts drift
        int64_t frameNumSamples = audioFrame->GetSampleFrameCount();
        int64_t pts = av_rescale_q(pkt.pts, ctx->audio_st->time_base,
                                   ctx->audio_st->codec->time_base);
        if (pts != (ctx->frameCount - 1L) * frameNumSamples + audioPtsDrift) {
            if (no_video == 0) {
                av_log(avctx, AV_LOG_WARNING,
                       "Audio pts mismatch - current -> %ld projected -> %ld drift -> %ld\n",
                       pts, (ctx->frameCount - 1L) * frameNumSamples, audioPtsDrift);
            }
            audioPtsDrift = pts - (ctx->frameCount - 1L) * frameNumSamples;
        }
    }

    return S_OK;
}
Example no. 8
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;

    frameCount++;

    // Handle Video Frame
    if (videoFrame)
    {
        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
            fprintf(stderr, "Frame received (#%lu) - No input signal detected - Frames dropped %u - Total dropped %u\n", frameCount, ++dropped, ++totaldropped);

        {
            AVPacket pkt;
            AVCodecContext *c;
            av_init_packet(&pkt);
            c = video_st->codec;
            if (g_verbose && frameCount % 25 == 0)
            {
                unsigned long long qsize = avpacket_queue_size(&queue);
                fprintf(stderr, "Frame received (#%lu) - Valid (%liB) - QSize %f\n", frameCount, videoFrame->GetRowBytes() * videoFrame->GetHeight(), (double)qsize/1024/1024);
            }
            videoFrame->GetBytes(&frameBytes);
            avpicture_fill((AVPicture*)picture, (uint8_t *)frameBytes,
                           PIX_FMT_UYVY422,
                           videoFrame->GetWidth(), videoFrame->GetHeight());
            videoFrame->GetStreamTime(&frameTime, &frameDuration,
                                      video_st->time_base.den);
            pkt.pts = pkt.dts = frameTime/video_st->time_base.num;
            pkt.duration = frameDuration;
            // TODO: verify this still applies
            pkt.flags |= AV_PKT_FLAG_KEY;
            pkt.stream_index = video_st->index;
            pkt.data = (uint8_t *)frameBytes;
            pkt.size = videoFrame->GetRowBytes() * videoFrame->GetHeight();
            //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);
            c->frame_number++;
//            av_interleaved_write_frame(oc, &pkt);
            avpacket_queue_put(&queue, &pkt);

            //write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
        }
//        frameCount++;

        if ((g_maxFrames > 0 && frameCount >= g_maxFrames) ||
            avpacket_queue_size(&queue) > g_memoryLimit)
        {
            pthread_cond_signal(&sleepCond);
        }
    }

    // Handle Audio Frame
    if (audioFrame)
    {
        AVCodecContext *c;
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        c = audio_st->codec;
        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() *
                g_audioChannels * (g_audioSampleDepth / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, audio_st->time_base.den);
        pkt.dts = pkt.pts = audio_pts/audio_st->time_base.num;
        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index = audio_st->index;
        pkt.data = (uint8_t *)audioFrameBytes;
        //pkt.size= avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);
        c->frame_number++;
        //write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
/*            if (av_interleaved_write_frame(oc, &pkt) != 0) {
            fprintf(stderr, "Error while writing audio frame\n");
            exit(1);
        } */
        avpacket_queue_put(&queue, &pkt);
    }
    return S_OK;
}
Example no. 9
HRESULT decklink_input_callback::VideoInputFrameArrived(
    IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
    void *frameBytes;
    void *audioFrameBytes;
    BMDTimeValue frameTime;
    BMDTimeValue frameDuration;
    int64_t wallclock = 0;

    ctx->frameCount++;
    if (ctx->audio_pts_source == PTS_SRC_WALLCLOCK || ctx->video_pts_source == PTS_SRC_WALLCLOCK)
        wallclock = av_gettime_relative();

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt;
        av_init_packet(&pkt);
        if (ctx->frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&ctx->queue);
            av_log(avctx, AV_LOG_DEBUG,
                    "Frame received (#%lu) - Valid (%liB) - QSize %fMB\n",
                    ctx->frameCount,
                    videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                    (double)qsize / 1024 / 1024);
        }

        videoFrame->GetBytes(&frameBytes);
        videoFrame->GetStreamTime(&frameTime, &frameDuration,
                                  ctx->video_st->time_base.den);

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
            if (ctx->draw_bars && videoFrame->GetPixelFormat() == bmdFormat8BitYUV) {
                unsigned bars[8] = {
                    0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035,
                    0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 };
                int width  = videoFrame->GetWidth();
                int height = videoFrame->GetHeight();
                unsigned *p = (unsigned *)frameBytes;

                for (int y = 0; y < height; y++) {
                    for (int x = 0; x < width; x += 2)
                        *p++ = bars[(x * 8) / width];
                }
            }

            if (!no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - No input signal detected "
                        "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 1;
        } else {
            if (no_video) {
                av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - Input returned "
                        "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped);
            }
            no_video = 0;
        }

        pkt.pts = get_pkt_pts(videoFrame, audioFrame, wallclock, ctx->video_pts_source, ctx->video_st->time_base, &initial_video_pts);
        pkt.dts = pkt.pts;

        pkt.duration = frameDuration;
        // TODO: verify this still applies
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->video_st->index;
        pkt.data         = (uint8_t *)frameBytes;
        pkt.size         = videoFrame->GetRowBytes() *
                           videoFrame->GetHeight();
        //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);

#if CONFIG_LIBZVBI
        if (!no_video && ctx->teletext_lines && videoFrame->GetPixelFormat() == bmdFormat8BitYUV && videoFrame->GetWidth() == 720) {
            IDeckLinkVideoFrameAncillary *vanc;
            AVPacket txt_pkt;
            uint8_t txt_buf0[1611]; // max 35 * 46 bytes decoded teletext lines + 1 byte data_identifier
            uint8_t *txt_buf = txt_buf0;

            if (videoFrame->GetAncillaryData(&vanc) == S_OK) {
                int i;
                int64_t line_mask = 1;
                txt_buf[0] = 0x10;    // data_identifier - EBU_data
                txt_buf++;
                for (i = 6; i < 336; i++, line_mask <<= 1) {
                    uint8_t *buf;
                    if ((ctx->teletext_lines & line_mask) && vanc->GetBufferForVerticalBlankingLine(i, (void**)&buf) == S_OK) {
                        if (teletext_data_unit_from_vbi_data(i, buf, txt_buf) >= 0)
                            txt_buf += 46;
                    }
                    if (i == 22)
                        i = 317;
                }
                vanc->Release();
                if (txt_buf - txt_buf0 > 1) {
                    int stuffing_units = (4 - ((45 + txt_buf - txt_buf0) / 46) % 4) % 4;
                    while (stuffing_units--) {
                        memset(txt_buf, 0xff, 46);
                        txt_buf[1] = 0x2c; // data_unit_length
                        txt_buf += 46;
                    }
                    av_init_packet(&txt_pkt);
                    txt_pkt.pts = pkt.pts;
                    txt_pkt.dts = pkt.dts;
                    txt_pkt.stream_index = ctx->teletext_st->index;
                    txt_pkt.data = txt_buf0;
                    txt_pkt.size = txt_buf - txt_buf0;
                    if (avpacket_queue_put(&ctx->queue, &txt_pkt) < 0) {
                        ++ctx->dropped;
                    }
                }
            }
        }
#endif

        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    // Handle Audio Frame
    if (audioFrame) {
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codecpar->channels * (16 / 8);
        audioFrame->GetBytes(&audioFrameBytes);
        audioFrame->GetPacketTime(&audio_pts, ctx->audio_st->time_base.den);
        pkt.pts = get_pkt_pts(videoFrame, audioFrame, wallclock, ctx->audio_pts_source, ctx->audio_st->time_base, &initial_audio_pts);
        pkt.dts = pkt.pts;

        //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts);
        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ctx->audio_st->index;
        pkt.data         = (uint8_t *)audioFrameBytes;

        if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
            ++ctx->dropped;
        }
    }

    return S_OK;
}
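Example 9 delegates timestamp selection to a get_pkt_pts() helper that is not shown here. A hedged sketch of what such a helper might look like, combining the PTS_SRC_WALLCLOCK branch visible above with the initial-pts rebasing that Examples 5-7 do by hand; PTS_SRC_VIDEO and PTS_SRC_AUDIO are assumed companion constants, and the real libavdevice implementation may differ:

/* Sketch only: pick the pts source, bring the value into the stream time
 * base, and rebase it on the first observed timestamp. Assumes the same
 * DeckLink/FFmpeg headers as the example above. */
static int64_t get_pkt_pts(IDeckLinkVideoInputFrame *videoFrame,
                           IDeckLinkAudioInputPacket *audioFrame,
                           int64_t wallclock, int pts_src,
                           AVRational time_base, int64_t *initial_pts)
{
    int64_t pts = AV_NOPTS_VALUE;
    BMDTimeValue bmd_pts, bmd_duration;
    AVRational us = { 1, 1000000 };   /* av_gettime_relative() is in microseconds */

    if (pts_src == PTS_SRC_WALLCLOCK) {
        pts = av_rescale_q(wallclock, us, time_base);
    } else if (pts_src == PTS_SRC_VIDEO && videoFrame &&
               videoFrame->GetStreamTime(&bmd_pts, &bmd_duration,
                                         time_base.den) == S_OK) {
        pts = bmd_pts / time_base.num;
    } else if (pts_src == PTS_SRC_AUDIO && audioFrame &&
               audioFrame->GetPacketTime(&bmd_pts, time_base.den) == S_OK) {
        pts = bmd_pts / time_base.num;
    }

    if (pts != AV_NOPTS_VALUE) {
        if (*initial_pts == AV_NOPTS_VALUE)
            *initial_pts = pts;
        pts -= *initial_pts;
    }
    return pts;
}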
Example no. 10
void write_video_packet(IDeckLinkVideoInputFrame *videoFrame,
                        int64_t pts, int64_t duration)
{
    AVPacket pkt;
    AVCodecContext *c;
    void *frameBytes;
    time_t cur_time;

    av_init_packet(&pkt);
    c = video_st->codec;
    if (g_verbose && frameCount % 25 == 0) {
        unsigned long long qsize = avpacket_queue_size(&queue);
        fprintf(stderr,
                "Frame received (#%lu) - Valid (%liB) - QSize %f\n",
                frameCount,
                videoFrame->GetRowBytes() * videoFrame->GetHeight(),
                (double)qsize / 1024 / 1024);
    }

    videoFrame->GetBytes(&frameBytes);

    if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) {
        if (pix_fmt == AV_PIX_FMT_UYVY422 && draw_bars) {
            unsigned bars[8] = {
                0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035,
                0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 };
            int width  = videoFrame->GetWidth();
            int height = videoFrame->GetHeight();
            unsigned *p = (unsigned *)frameBytes;

            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x += 2)
                    *p++ = bars[(x * 8) / width];
            }
        }
        if (!no_video) {
            time(&cur_time);
            fprintf(stderr,"%s "
                    "Frame received (#%lu) - No input signal detected "
                    "- Frames dropped %u - Total dropped %u\n",
                    ctime(&cur_time),
                    frameCount, ++dropped, ++totaldropped);
        }
        no_video = 1;
    } else {
        if (no_video) {
            time(&cur_time);
            fprintf(stderr, "%s "
                    "Frame received (#%lu) - Input returned "
                    "- Frames dropped %u - Total dropped %u\n",
                    ctime(&cur_time),
                    frameCount, ++dropped, ++totaldropped);
        }
        no_video = 0;
    }

    pkt.dts = pkt.pts = pts;

    pkt.duration = duration;
    // TODO: verify this still applies
    pkt.flags       |= AV_PKT_FLAG_KEY;
    pkt.stream_index = video_st->index;
    pkt.data         = (uint8_t *)frameBytes;
    pkt.size         = videoFrame->GetRowBytes() *
                       videoFrame->GetHeight();
    //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts);
    c->frame_number++;
    avpacket_queue_put(&queue, &pkt);
}