/* frei0r source filter: legacy string-args initializer.
 * Expected args syntax: "<frame_size>:<frame_rate>:<dl_name>[{:|=}<params>]".
 * Parses size and rate, stores the inverted rate as the filter time base and
 * hands the plugin name off to frei0r_init().
 * Returns 0 on success or a negative AVERROR code. */
static av_cold int source_init(AVFilterContext *ctx, const char *args, void *opaque)
{
    Frei0rContext *frei0r = ctx->priv;
    char dl_name[1024], c;
    char frame_size[128] = "";
    char frame_rate[128] = "";
    AVRational frame_rate_q;

    /* zero the params buffer up front: the %255c conversion below does not
     * NUL-terminate, so the memset provides the terminator */
    memset(frei0r->params, 0, sizeof(frei0r->params));

    /* %c eats the ':' or '=' separator between dl_name and the plugin params;
     * trailing fields are simply left at their defaults if absent */
    if (args)
        sscanf(args, "%127[^:]:%127[^:]:%1023[^:=]%c%255c",
               frame_size, frame_rate, dl_name, &c, frei0r->params);

    if (av_parse_video_size(&frei0r->w, &frei0r->h, frame_size) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: '%s'\n", frame_size);
        return AVERROR(EINVAL);
    }

    if (av_parse_video_rate(&frame_rate_q, frame_rate) < 0 ||
        frame_rate_q.den <= 0 || frame_rate_q.num <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: '%s'\n", frame_rate);
        return AVERROR(EINVAL);
    }
    /* the time base is the inverse of the frame rate */
    frei0r->time_base.num = frame_rate_q.den;
    frei0r->time_base.den = frame_rate_q.num;

    return frei0r_init(ctx, dl_name, F0R_PLUGIN_TYPE_SOURCE);
}
/* color source filter: legacy string-args initializer.
 * args syntax: "<color>:<frame_size>:<frame_rate>"; every field is optional
 * and falls back to black / 320x240 / 25 respectively.
 * Returns 0 on success or a negative AVERROR code. */
static av_cold int color_init(AVFilterContext *ctx, const char *args)
{
    ColorContext *color = ctx->priv;
    char color_str[128] = "black";
    char size_str [128] = "320x240";
    char rate_str [128] = "25";
    AVRational rate;
    int ret;

    if (args)
        sscanf(args, "%127[^:]:%127[^:]:%127s", color_str, size_str, rate_str);

    if (av_parse_video_size(&color->w, &color->h, size_str) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: %s\n", size_str);
        return AVERROR(EINVAL);
    }

    ret = av_parse_video_rate(&rate, rate_str);
    if (ret < 0 || rate.num <= 0 || rate.den <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: %s\n", rate_str);
        return AVERROR(EINVAL);
    }
    /* store the inverse of the rate as the time base */
    color->time_base = (AVRational){ rate.den, rate.num };

    ret = av_parse_color(color->color, color_str, -1, ctx);
    return ret < 0 ? ret : 0;
}
/* MPEG-1/H.263 input */
/* Shared raw-video demuxer header reader (legacy API variant).
 * Creates a single video stream, taking the codec id from the input format's
 * "value" field and the frame rate from the demuxer's private option.
 * Returns 0 on success or a negative AVERROR code. */
int ff_raw_video_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    AVStream *st;
    FFRawVideoDemuxerContext *s1 = s->priv_data;
    AVRational framerate;
    int ret = 0;

    st = avformat_new_stream(s, NULL);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id = s->iformat->value;
    /* raw elementary streams carry no frame boundaries or timestamps,
     * so a full parser pass is required */
    st->need_parsing = AVSTREAM_PARSE_FULL;

    if ((ret = av_parse_video_rate(&framerate, s1->framerate)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               s1->framerate);
        goto fail;
    }

    /* codec time base is the inverse of the frame rate */
    st->codec->time_base = (AVRational){framerate.den, framerate.num};
    /* NOTE(review): the fixed 1/1200000 stream time base appears chosen as a
     * common multiple of typical MPEG frame rates -- confirm against callers */
    avpriv_set_pts_info(st, 64, 1, 1200000);

fail:
    return ret;
}
/* MPEG-1/H.263 input */
/* Shared raw-video demuxer header reader.
 * Creates a single video stream, taking the codec id from the input format's
 * raw_codec_id and the frame rate from the demuxer's private option; the
 * parsed rate also becomes the stream time base.
 * Returns 0 on success or a negative AVERROR code. */
int ff_raw_video_read_header(AVFormatContext *s)
{
    AVStream *st;
    FFRawVideoDemuxerContext *s1 = s->priv_data;
    AVRational framerate;
    int ret = 0;

    st = avformat_new_stream(s, NULL);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id = s->iformat->raw_codec_id;
    /* raw streams need a full parser pass to find frame boundaries */
    st->need_parsing = AVSTREAM_PARSE_FULL;

    if ((ret = av_parse_video_rate(&framerate, s1->framerate)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               s1->framerate);
        goto fail;
    }

    /* while the deprecated r_frame_rate exists, assign it the same value;
     * the #if makes the assignment chain onto avg_frame_rate */
#if FF_API_R_FRAME_RATE
    st->r_frame_rate =
#endif
    st->avg_frame_rate = framerate;
    avpriv_set_pts_info(st, 64, framerate.den, framerate.num);

fail:
    return ret;
}
/* fps filter initializer: parse the user-supplied rate string, allocate the
 * two-slot frame FIFO and reset the output pts.
 * Returns 0 on success or a negative AVERROR code. */
static av_cold int init(AVFilterContext *ctx)
{
    FPSContext *fps = ctx->priv;
    int err = av_parse_video_rate(&fps->framerate, fps->fps);

    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error parsing framerate %s.\n", fps->fps);
        return err;
    }

    /* FIFO holds frame pointers; two slots is enough for the fps algorithm */
    fps->fifo = av_fifo_alloc(2 * sizeof(AVFrame*));
    if (!fps->fifo)
        return AVERROR(ENOMEM);

    fps->pts = AV_NOPTS_VALUE;
    av_log(ctx, AV_LOG_VERBOSE, "fps=%d/%d\n",
           fps->framerate.num, fps->framerate.den);
    return 0;
}
/* Self-test driver: feed av_parse_video_rate() a table of valid, degenerate
 * and malformed rate strings and print the parsed rational plus the return
 * code for each.  Output is compared against a reference by the test suite. */
int main(void)
{
    printf("Testing av_parse_video_rate()\n");

    {
        int i;
        /* static const: the table is read-only and built once, and the
         * declaration matches the sibling test helper's style */
        static const char *const rates[] = {
            "-inf",
            "inf",
            "nan",
            "123/0",
            "-123 / 0",
            "",
            "/",
            " 123  /  321",
            "foo/foo",
            "foo/1",
            "1/foo",
            "0/0",
            "/0",
            "1/",
            "1",
            "0",
            "-123/123",
            "-foo",
            "123.23",
            ".23",
            "-.23",
            "-0.234",
            "-0.0000001",
            "  21332.2324   ",
            " -21332.2324   ",
        };

        for (i = 0; i < FF_ARRAY_ELEMS(rates); i++) {
            int ret;
            AVRational q = (AVRational){0, 0};
            /* two statements instead of the original comma expression:
             * same evaluation order, clearer sequencing */
            ret = av_parse_video_rate(&q, rates[i]);
            printf("'%s' -> %d/%d ret:%d\n", rates[i], q.num, q.den, ret);
        }
    }

    return 0;
}
/* rawvideo demuxer header reader: creates one video stream configured from
 * the private options (video_size, pixel_format, framerate).  The bit rate is
 * derived from the uncompressed picture size and the stream time base.
 * Returns 0 on success or a negative AVERROR code. */
static int rawvideo_read_header(AVFormatContext *ctx)
{
    RawVideoDemuxerContext *s = ctx->priv_data;
    int width = 0, height = 0, ret = 0;
    enum AVPixelFormat pix_fmt;
    AVRational framerate;
    AVStream *st;

    st = avformat_new_stream(ctx, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id = ctx->iformat->raw_codec_id;

    /* video_size is optional; width/height stay 0 when it is unset */
    if (s->video_size &&
        (ret = av_parse_video_size(&width, &height, s->video_size)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Couldn't parse video size.\n");
        return ret;
    }

    if ((pix_fmt = av_get_pix_fmt(s->pixel_format)) == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR, "No such pixel format: %s.\n",
               s->pixel_format);
        return AVERROR(EINVAL);
    }

    if ((ret = av_parse_video_rate(&framerate, s->framerate)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               s->framerate);
        return ret;
    }

    avpriv_set_pts_info(st, 64, framerate.den, framerate.num);

    st->codec->width  = width;
    st->codec->height = height;
    st->codec->pix_fmt = pix_fmt;
    /* bits per frame (picture size is in bytes, hence the {8,1} factor)
     * rescaled by the per-frame time base gives bits per second */
    st->codec->bit_rate = av_rescale_q(avpicture_get_size(st->codec->pix_fmt, width, height),
                                       (AVRational){8,1}, st->time_base);

    return 0;
}
/* frei0r source filter initializer (AVOption-based variant): validate the
 * size and frame-rate options, store the inverted rate as the time base and
 * load the named plugin.
 * Returns 0 on success or a negative AVERROR code. */
static av_cold int source_init(AVFilterContext *ctx)
{
    Frei0rContext *s = ctx->priv;
    AVRational rate;

    if (av_parse_video_size(&s->w, &s->h, s->size) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: '%s'.\n", s->size);
        return AVERROR(EINVAL);
    }

    if (av_parse_video_rate(&rate, s->framerate) < 0 ||
        rate.num <= 0 || rate.den <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: '%s'.\n", s->framerate);
        return AVERROR(EINVAL);
    }

    /* time base is the inverse of the frame rate */
    s->time_base = (AVRational){ rate.den, rate.num };

    return frei0r_init(ctx, s->dl_name, F0R_PLUGIN_TYPE_SOURCE);
}
static void test_av_parse_video_rate(void) { int i; static const char *const rates[] = { "-inf", "inf", "nan", "123/0", "-123 / 0", "", "/", " 123 / 321", "foo/foo", "foo/1", "1/foo", "0/0", "/0", "1/", "1", "0", "-123/123", "-foo", "123.23", ".23", "-.23", "-0.234", "-0.0000001", " 21332.2324 ", " -21332.2324 ", }; for (i = 0; i < FF_ARRAY_ELEMS(rates); i++) { int ret; AVRational q = { 0, 0 }; ret = av_parse_video_rate(&q, rates[i]); printf("'%s' -> %d/%d %s\n", rates[i], q.num, q.den, ret ? "ERROR" : "OK"); } }
/* color source filter initializer (AVOption-based variant): validate the
 * size, frame-rate and color option strings, storing the inverted rate as
 * the time base and the parsed RGBA value in color->color.
 * Returns 0 on success or a negative AVERROR code. */
static av_cold int color_init(AVFilterContext *ctx)
{
    ColorContext *color = ctx->priv;
    AVRational rate;
    int ret;

    if (av_parse_video_size(&color->w, &color->h, color->size_str) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: %s\n", color->size_str);
        return AVERROR(EINVAL);
    }

    ret = av_parse_video_rate(&rate, color->framerate_str);
    if (ret < 0 || rate.num <= 0 || rate.den <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: %s\n",
               color->framerate_str);
        return AVERROR(EINVAL);
    }
    /* time base is the inverse of the frame rate */
    color->time_base = (AVRational){ rate.den, rate.num };

    ret = av_parse_color(color->color, color->color_str, -1, ctx);
    return ret < 0 ? ret : 0;
}
/* Handle one directive inside a <Stream>...</Stream> block of the ffserver
 * configuration file.  cmd is the directive name, *p points past it into the
 * rest of the line.  "<Stream" allocates a new FFServerStream and publishes it
 * via *pstream; "</Stream>" finalizes codecs and clears *pstream.  All other
 * directives mutate either the current stream or the pending encoder defaults
 * held in config.  The ERROR/WARNING macros record diagnostics and continue
 * (ERROR does not return from this function).
 * Returns 0, or a negative AVERROR code on allocation failure. */
static int ffserver_parse_config_stream(FFServerConfig *config, const char *cmd,
                                        const char **p, int line_num,
                                        FFServerStream **pstream)
{
    char arg[1024], arg2[1024];
    FFServerStream *stream;

    av_assert0(pstream);
    stream = *pstream;

    if (!av_strcasecmp(cmd, "<Stream")) {
        /* open a new stream section: create the stream, strip the trailing
         * '>' from its name and reset the per-stream encoder defaults */
        char *q;
        FFServerStream *s;
        stream = av_mallocz(sizeof(FFServerStream));
        if (!stream)
            return AVERROR(ENOMEM);
        ffserver_get_arg(stream->filename, sizeof(stream->filename), p);
        q = strrchr(stream->filename, '>');
        if (q)
            *q = '\0';

        for (s = config->first_stream; s; s = s->next) {
            if (!strcmp(stream->filename, s->filename))
                ERROR("Stream '%s' already registered\n", s->filename);
        }

        stream->fmt = ffserver_guess_format(NULL, stream->filename, NULL);
        avcodec_get_context_defaults3(&config->video_enc, NULL);
        avcodec_get_context_defaults3(&config->audio_enc, NULL);

        config->audio_id = AV_CODEC_ID_NONE;
        config->video_id = AV_CODEC_ID_NONE;
        if (stream->fmt) {
            config->audio_id = stream->fmt->audio_codec;
            config->video_id = stream->fmt->video_codec;
        }
        *pstream = stream;
        return 0;
    }

    /* every other directive requires an open stream section */
    av_assert0(stream);

    if (!av_strcasecmp(cmd, "Feed")) {
        /* attach the stream to an already-declared feed by name */
        FFServerStream *sfeed;
        ffserver_get_arg(arg, sizeof(arg), p);
        sfeed = config->first_feed;
        while (sfeed) {
            if (!strcmp(sfeed->filename, arg))
                break;
            sfeed = sfeed->next_feed;
        }
        if (!sfeed)
            ERROR("Feed with name '%s' for stream '%s' is not defined\n", arg,
                  stream->filename);
        else
            stream->feed = sfeed;
    } else if (!av_strcasecmp(cmd, "Format")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (!strcmp(arg, "status")) {
            stream->stream_type = STREAM_TYPE_STATUS;
            stream->fmt = NULL;
        } else {
            stream->stream_type = STREAM_TYPE_LIVE;
            /* JPEG cannot be used here, so use single frame MJPEG */
            if (!strcmp(arg, "jpeg"))
                strcpy(arg, "mjpeg");
            stream->fmt = ffserver_guess_format(arg, NULL, NULL);
            if (!stream->fmt)
                ERROR("Unknown Format: %s\n", arg);
        }
        if (stream->fmt) {
            config->audio_id = stream->fmt->audio_codec;
            config->video_id = stream->fmt->video_codec;
        }
    } else if (!av_strcasecmp(cmd, "InputFormat")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->ifmt = av_find_input_format(arg);
        if (!stream->ifmt)
            ERROR("Unknown input format: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "FaviconURL")) {
        if (stream->stream_type == STREAM_TYPE_STATUS)
            ffserver_get_arg(stream->feed_filename,
                             sizeof(stream->feed_filename), p);
        else
            ERROR("FaviconURL only permitted for status streams\n");
    } else if (!av_strcasecmp(cmd, "Author")    ||
               !av_strcasecmp(cmd, "Comment")   ||
               !av_strcasecmp(cmd, "Copyright") ||
               !av_strcasecmp(cmd, "Title")) {
        /* deprecated shorthand: lowercase the directive name and store it
         * as a metadata key */
        char key[32];
        int i, ret;

        ffserver_get_arg(arg, sizeof(arg), p);
        for (i = 0; i < strlen(cmd); i++)
            key[i] = av_tolower(cmd[i]);
        key[i] = 0;
        WARNING("'%s' option in configuration file is deprecated, "
                "use 'Metadata %s VALUE' instead\n", cmd, key);
        if ((ret = av_dict_set(&stream->metadata, key, arg, 0)) < 0)
            ERROR("Could not set metadata '%s' to value '%s': %s\n",
                  key, arg, av_err2str(ret));
    } else if (!av_strcasecmp(cmd, "Metadata")) {
        int ret;
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_get_arg(arg2, sizeof(arg2), p);
        if ((ret = av_dict_set(&stream->metadata, arg, arg2, 0)) < 0) {
            ERROR("Could not set metadata '%s' to value '%s': %s\n",
                  arg, arg2, av_err2str(ret));
        }
    } else if (!av_strcasecmp(cmd, "Preroll")) {
        /* seconds in the config file, stored internally in milliseconds */
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->prebuffer = atof(arg) * 1000;
    } else if (!av_strcasecmp(cmd, "StartSendOnKey")) {
        stream->send_on_key = 1;
    } else if (!av_strcasecmp(cmd, "AudioCodec")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->audio_id = opt_codec(arg, AVMEDIA_TYPE_AUDIO);
        if (config->audio_id == AV_CODEC_ID_NONE)
            ERROR("Unknown AudioCodec: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "VideoCodec")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_id = opt_codec(arg, AVMEDIA_TYPE_VIDEO);
        if (config->video_id == AV_CODEC_ID_NONE)
            ERROR("Unknown VideoCodec: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "MaxTime")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->max_time = atof(arg) * 1000;
    } else if (!av_strcasecmp(cmd, "AudioBitRate")) {
        /* kilobits in the config file, bits internally */
        ffserver_get_arg(arg, sizeof(arg), p);
        config->audio_enc.bit_rate = lrintf(atof(arg) * 1000);
    } else if (!av_strcasecmp(cmd, "AudioChannels")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->audio_enc.channels = atoi(arg);
    } else if (!av_strcasecmp(cmd, "AudioSampleRate")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->audio_enc.sample_rate = atoi(arg);
    } else if (!av_strcasecmp(cmd, "VideoBitRateRange")) {
        int minrate, maxrate;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (sscanf(arg, "%d-%d", &minrate, &maxrate) == 2) {
            config->video_enc.rc_min_rate = minrate * 1000;
            config->video_enc.rc_max_rate = maxrate * 1000;
        } else
            ERROR("Incorrect format for VideoBitRateRange -- should be "
                  "<min>-<max>: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "Debug")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.debug = strtol(arg,0,0);
    } else if (!av_strcasecmp(cmd, "Strict")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.strict_std_compliance = atoi(arg);
    } else if (!av_strcasecmp(cmd, "VideoBufferSize")) {
        /* config value is in KiB of bytes, stored as bits */
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.rc_buffer_size = atoi(arg) * 8*1024;
    } else if (!av_strcasecmp(cmd, "VideoBitRateTolerance")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.bit_rate_tolerance = atoi(arg) * 1000;
    } else if (!av_strcasecmp(cmd, "VideoBitRate")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.bit_rate = atoi(arg) * 1000;
    } else if (!av_strcasecmp(cmd, "VideoSize")) {
        int ret;
        ffserver_get_arg(arg, sizeof(arg), p);
        ret = av_parse_video_size(&config->video_enc.width,
                                  &config->video_enc.height, arg);
        if (ret < 0)
            ERROR("Invalid video size '%s'\n", arg);
        else if ((config->video_enc.width % 16) != 0 ||
                 (config->video_enc.height % 16) != 0)
            ERROR("Image size must be a multiple of 16\n");
    } else if (!av_strcasecmp(cmd, "VideoFrameRate")) {
        AVRational frame_rate;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (av_parse_video_rate(&frame_rate, arg) < 0) {
            ERROR("Incorrect frame rate: %s\n", arg);
        } else {
            /* time base is the inverse of the frame rate */
            config->video_enc.time_base.num = frame_rate.den;
            config->video_enc.time_base.den = frame_rate.num;
        }
    } else if (!av_strcasecmp(cmd, "PixelFormat")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.pix_fmt = av_get_pix_fmt(arg);
        if (config->video_enc.pix_fmt == AV_PIX_FMT_NONE)
            ERROR("Unknown pixel format: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "VideoGopSize")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.gop_size = atoi(arg);
    } else if (!av_strcasecmp(cmd, "VideoIntraOnly")) {
        config->video_enc.gop_size = 1;
    } else if (!av_strcasecmp(cmd, "VideoHighQuality")) {
        config->video_enc.mb_decision = FF_MB_DECISION_BITS;
    } else if (!av_strcasecmp(cmd, "Video4MotionVector")) {
        config->video_enc.mb_decision = FF_MB_DECISION_BITS; //FIXME remove
        config->video_enc.flags |= CODEC_FLAG_4MV;
    } else if (!av_strcasecmp(cmd, "AVOptionVideo") ||
               !av_strcasecmp(cmd, "AVOptionAudio")) {
        /* pass an arbitrary name=value pair through to the encoder context */
        AVCodecContext *avctx;
        int type;
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_get_arg(arg2, sizeof(arg2), p);
        if (!av_strcasecmp(cmd, "AVOptionVideo")) {
            avctx = &config->video_enc;
            type = AV_OPT_FLAG_VIDEO_PARAM;
        } else {
            avctx = &config->audio_enc;
            type = AV_OPT_FLAG_AUDIO_PARAM;
        }
        if (ffserver_opt_default(arg, arg2, avctx,
                                 type|AV_OPT_FLAG_ENCODING_PARAM)) {
            ERROR("Error setting %s option to %s %s\n", cmd, arg, arg2);
        }
    } else if (!av_strcasecmp(cmd, "AVPresetVideo") ||
               !av_strcasecmp(cmd, "AVPresetAudio")) {
        AVCodecContext *avctx;
        int type;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (!av_strcasecmp(cmd, "AVPresetVideo")) {
            avctx = &config->video_enc;
            config->video_enc.codec_id = config->video_id;
            type = AV_OPT_FLAG_VIDEO_PARAM;
        } else {
            avctx = &config->audio_enc;
            config->audio_enc.codec_id = config->audio_id;
            type = AV_OPT_FLAG_AUDIO_PARAM;
        }
        if (ffserver_opt_preset(arg, avctx, type|AV_OPT_FLAG_ENCODING_PARAM,
                                &config->audio_id, &config->video_id)) {
            ERROR("AVPreset error: %s\n", arg);
        }
    } else if (!av_strcasecmp(cmd, "VideoTag")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (strlen(arg) == 4)
            config->video_enc.codec_tag = MKTAG(arg[0], arg[1], arg[2], arg[3]);
    } else if (!av_strcasecmp(cmd, "BitExact")) {
        config->video_enc.flags |= CODEC_FLAG_BITEXACT;
    } else if (!av_strcasecmp(cmd, "DctFastint")) {
        config->video_enc.dct_algo = FF_DCT_FASTINT;
    } else if (!av_strcasecmp(cmd, "IdctSimple")) {
        config->video_enc.idct_algo = FF_IDCT_SIMPLE;
    } else if (!av_strcasecmp(cmd, "Qscale")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.flags |= CODEC_FLAG_QSCALE;
        config->video_enc.global_quality = FF_QP2LAMBDA * atoi(arg);
    } else if (!av_strcasecmp(cmd, "VideoQDiff")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.max_qdiff = atoi(arg);
        if (config->video_enc.max_qdiff < 1 || config->video_enc.max_qdiff > 31)
            ERROR("VideoQDiff out of range\n");
    } else if (!av_strcasecmp(cmd, "VideoQMax")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.qmax = atoi(arg);
        if (config->video_enc.qmax < 1 || config->video_enc.qmax > 31)
            ERROR("VideoQMax out of range\n");
    } else if (!av_strcasecmp(cmd, "VideoQMin")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.qmin = atoi(arg);
        if (config->video_enc.qmin < 1 || config->video_enc.qmin > 31)
            ERROR("VideoQMin out of range\n");
    } else if (!av_strcasecmp(cmd, "LumiMask")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.lumi_masking = atof(arg);
    } else if (!av_strcasecmp(cmd, "DarkMask")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_enc.dark_masking = atof(arg);
    } else if (!av_strcasecmp(cmd, "NoVideo")) {
        config->video_id = AV_CODEC_ID_NONE;
    } else if (!av_strcasecmp(cmd, "NoAudio")) {
        config->audio_id = AV_CODEC_ID_NONE;
    } else if (!av_strcasecmp(cmd, "ACL")) {
        ffserver_parse_acl_row(stream, NULL, NULL, *p, config->filename,
                               line_num);
    } else if (!av_strcasecmp(cmd, "DynamicACL")) {
        ffserver_get_arg(stream->dynamic_acl, sizeof(stream->dynamic_acl), p);
    } else if (!av_strcasecmp(cmd, "RTSPOption")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        av_freep(&stream->rtsp_option);
        stream->rtsp_option = av_strdup(arg);
    } else if (!av_strcasecmp(cmd, "MulticastAddress")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (resolve_host(&stream->multicast_ip, arg) != 0)
            ERROR("Invalid host/IP address: %s\n", arg);
        stream->is_multicast = 1;
        stream->loop = 1; /* default is looping */
    } else if (!av_strcasecmp(cmd, "MulticastPort")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->multicast_port = atoi(arg);
    } else if (!av_strcasecmp(cmd, "MulticastTTL")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->multicast_ttl = atoi(arg);
    } else if (!av_strcasecmp(cmd, "NoLoop")) {
        stream->loop = 0;
    } else if (!av_strcasecmp(cmd, "</Stream>")) {
        /* close the section: commit the pending encoder settings to the
         * stream, unless it plays back from an ffm feed (which carries its
         * own codec parameters) */
        if (stream->feed && stream->fmt &&
            strcmp(stream->fmt->name, "ffm") != 0) {
            if (config->audio_id != AV_CODEC_ID_NONE) {
                config->audio_enc.codec_type = AVMEDIA_TYPE_AUDIO;
                config->audio_enc.codec_id = config->audio_id;
                add_codec(stream, &config->audio_enc);
            }
            if (config->video_id != AV_CODEC_ID_NONE) {
                config->video_enc.codec_type = AVMEDIA_TYPE_VIDEO;
                config->video_enc.codec_id = config->video_id;
                add_codec(stream, &config->video_enc);
            }
        }
        *pstream = NULL;
    } else if (!av_strcasecmp(cmd, "File") ||
               !av_strcasecmp(cmd, "ReadOnlyFile")) {
        ffserver_get_arg(stream->feed_filename, sizeof(stream->feed_filename),
                         p);
    } else {
        ERROR("Invalid entry '%s' inside <Stream></Stream>\n", cmd);
    }
    return 0;
}
/* GDI screen-grab device header reader.
 * The "filename" carries the capture origin as "x,y"; the capture area is the
 * desktop from that offset to the bottom-right corner.  Sets up the desktop/
 * capture DCs and bitmap, queries the native bitmap format via GetDIBits and
 * exposes a single rawvideo stream at NTSC rate.
 * Returns 0 on success or a negative AVERROR code. */
static int gdi_read_header(AVFormatContext *s)
{
    struct gdi_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int width, height;
    BITMAPINFOHEADER bih;
    int bihsize = sizeof(BITMAPINFOHEADER);

    /* missing fields simply leave the offsets at 0 */
    sscanf( s->filename, "%d,%d", &ctx->x_off, &ctx->y_off );

    memset( &bih, 0, sizeof(bih) );

    width  = GetSystemMetrics( SM_CXSCREEN );
    height = GetSystemMetrics( SM_CYSCREEN );

    if( ctx->x_off > width || ctx->y_off > height ) {
        av_log( s, AV_LOG_ERROR,
                "Specified size greater than Desktop size."
                " %dx%d > %dx%d\n", ctx->x_off, ctx->y_off, width, height );
        return AVERROR(EIO);
    }

    /* capture everything from the offset to the screen edge */
    width  = width  - ctx->x_off;
    height = height - ctx->y_off;

    ctx->hDesktopWnd    = GetDesktopWindow( );
    ctx->hDesktopDC     = GetDC( ctx->hDesktopWnd );
    ctx->hCaptureDC     = CreateCompatibleDC( ctx->hDesktopDC );
    ctx->hCaptureBitmap = CreateCompatibleBitmap( ctx->hDesktopDC,
                                                  width, height );
    SelectObject( ctx->hCaptureDC, ctx->hCaptureBitmap );

    /* first query with a bare header to learn the native compression */
    bih.biSize = bihsize;
    GetDIBits( ctx->hCaptureDC, ctx->hCaptureBitmap, 0, 0, NULL,
               &bih, DIB_RGB_COLORS );

    /* BI_BITFIELDS (3) appends three DWORD channel masks plus padding */
    if( bih.biCompression == 3 )
        bihsize += 0x10;

    ctx->bih = av_mallocz( bihsize );
    if( ctx->bih == NULL )
        return AVERROR(ENOMEM);

    /* second query fills the full (possibly extended) header */
    ctx->bih->biSize = bihsize;
    GetDIBits( ctx->hCaptureDC, ctx->hCaptureBitmap, 0, 0, NULL,
               ctx->bih, DIB_RGB_COLORS );

    st = avformat_new_stream( s, NULL );
    if ( !st ) {
        /* release the header so the context is not left owning a dangling
         * allocation on this early-failure path */
        av_freep( &ctx->bih );
        return AVERROR(ENOMEM);
    }

    /* informational message: verbose level with terminating newline
     * (was erroneously logged at error level without one) */
    av_log( s, AV_LOG_VERBOSE, "new gdi stream %d\n", s->nb_streams);

    codec = st->codec;
    codec->codec_type = AVMEDIA_TYPE_VIDEO;
    codec->codec_id   = CODEC_ID_RAWVIDEO;

    //if(ctx->bih->biCompression == BI_RGB) {
        //codec->bits_per_coded_sample = ctx->bih->biBitCount;
        /* GDI DIBs are stored bottom-up; signal that to the rawvideo decoder */
        codec->extradata = av_malloc(9 + FF_INPUT_BUFFER_PADDING_SIZE);
        if (codec->extradata) {
            codec->extradata_size = 9;
            memcpy(codec->extradata, "BottomUp", 9);
        }
    //}

    codec->bits_per_coded_sample = ctx->bih->biBitCount;
    /* values above BI_BITFIELDS are fourcc codes */
    if( ctx->bih->biCompression > 3 )
        codec->codec_tag = ctx->bih->biCompression;
    codec->width  = ctx->bih->biWidth;
    codec->height = ctx->bih->biHeight;

    /* parse 30000/1001 and invert it into a time base */
    av_parse_video_rate(&codec->time_base, "ntsc");
    codec->time_base = (AVRational){codec->time_base.den,
                                    codec->time_base.num};

    av_set_pts_info( st, 32, 1, 1000 );

    return 0;
}
/* DirectShow grab device header reader.
 * Initializes COM, validates the device string and options, builds the
 * capture filter graph, opens the requested video/audio devices (with a
 * fallback that looks for the audio pin on combined video sources), wires up
 * the media-event handle used for end-of-stream detection, and starts the
 * graph.  In listing modes it prints devices/options and returns
 * AVERROR_EXIT.  On any failure everything opened so far is torn down via
 * dshow_read_close().
 * Returns 0 on success or a negative AVERROR code. */
static int dshow_read_header(AVFormatContext *avctx)
{
    struct dshow_ctx *ctx = avctx->priv_data;
    IGraphBuilder *graph = NULL;
    ICreateDevEnum *devenum = NULL;
    IMediaControl *control = NULL;
    IMediaEvent *media_event = NULL;
    HANDLE media_event_handle;
    HANDLE proc;
    int ret = AVERROR(EIO);
    int r;

    CoInitialize(0);

    if (!ctx->list_devices && !parse_device_name(avctx)) {
        av_log(avctx, AV_LOG_ERROR, "Malformed dshow input string.\n");
        goto error;
    }

    ctx->video_codec_id = avctx->video_codec_id ? avctx->video_codec_id
                                                : AV_CODEC_ID_RAWVIDEO;
    if (ctx->pixel_format != AV_PIX_FMT_NONE) {
        /* a forced pixel format only makes sense for uncompressed capture */
        if (ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO) {
            av_log(avctx, AV_LOG_ERROR, "Pixel format may only be set when "
                              "video codec is not set or set to rawvideo\n");
            ret = AVERROR(EINVAL);
            goto error;
        }
    }
    if (ctx->framerate) {
        r = av_parse_video_rate(&ctx->requested_framerate, ctx->framerate);
        if (r < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
                   ctx->framerate);
            goto error;
        }
    }

    r = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                         &IID_IGraphBuilder, (void **) &graph);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not create capture graph.\n");
        goto error;
    }
    ctx->graph = graph;

    r = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                         &IID_ICreateDevEnum, (void **) &devenum);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enumerate system devices.\n");
        goto error;
    }

    if (ctx->list_devices) {
        /* listing mode: print and exit without opening anything */
        av_log(avctx, AV_LOG_INFO, "DirectShow video devices (some may be both video and audio devices)\n");
        dshow_cycle_devices(avctx, devenum, VideoDevice, VideoSourceDevice, NULL);
        av_log(avctx, AV_LOG_INFO, "DirectShow audio devices\n");
        dshow_cycle_devices(avctx, devenum, AudioDevice, AudioSourceDevice, NULL);
        ret = AVERROR_EXIT;
        goto error;
    }

    if (ctx->list_options) {
        if (ctx->device_name[VideoDevice])
            if ((r = dshow_list_device_options(avctx, devenum, VideoDevice,
                                               VideoSourceDevice))) {
                ret = r;
                goto error;
            }
        if (ctx->device_name[AudioDevice]) {
            if (dshow_list_device_options(avctx, devenum, AudioDevice,
                                          AudioSourceDevice)) {
                /* show audio options from combined video+audio sources as fallback */
                if ((r = dshow_list_device_options(avctx, devenum, AudioDevice,
                                                   VideoSourceDevice))) {
                    ret = r;
                    goto error;
                }
            }
        }
    }
    if (ctx->device_name[VideoDevice]) {
        if ((r = dshow_open_device(avctx, devenum, VideoDevice,
                                   VideoSourceDevice)) < 0 ||
            (r = dshow_add_device(avctx, VideoDevice)) < 0) {
            ret = r;
            goto error;
        }
    }
    if (ctx->device_name[AudioDevice]) {
        if ((r = dshow_open_device(avctx, devenum, AudioDevice,
                                   AudioSourceDevice)) < 0 ||
            (r = dshow_add_device(avctx, AudioDevice)) < 0) {
            av_log(avctx, AV_LOG_INFO, "Searching for audio device within video devices for %s\n", ctx->device_name[AudioDevice]);
            /* see if there's a video source with an audio pin with the given audio name */
            if ((r = dshow_open_device(avctx, devenum, AudioDevice,
                                       VideoSourceDevice)) < 0 ||
                (r = dshow_add_device(avctx, AudioDevice)) < 0) {
                ret = r;
                goto error;
            }
        }
    }
    if (ctx->list_options) {
        /* allow it to list crossbar options in dshow_open_device */
        ret = AVERROR_EXIT;
        goto error;
    }
    ctx->curbufsize[0] = 0;
    ctx->curbufsize[1] = 0;
    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if (!ctx->mutex) {
        av_log(avctx, AV_LOG_ERROR, "Could not create Mutex\n");
        goto error;
    }
    /* event[1] is signalled by the callback; event[0] (below) tracks graph
     * completion via the duplicated media-event handle */
    ctx->event[1] = CreateEvent(NULL, 1, 0, NULL);
    if (!ctx->event[1]) {
        av_log(avctx, AV_LOG_ERROR, "Could not create Event\n");
        goto error;
    }

    r = IGraphBuilder_QueryInterface(graph, &IID_IMediaControl,
                                     (void **) &control);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not get media control.\n");
        goto error;
    }
    ctx->control = control;

    r = IGraphBuilder_QueryInterface(graph, &IID_IMediaEvent,
                                     (void **) &media_event);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not get media event.\n");
        goto error;
    }
    ctx->media_event = media_event;

    r = IMediaEvent_GetEventHandle(media_event, (void *) &media_event_handle);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not get media event handle.\n");
        goto error;
    }
    proc = GetCurrentProcess();
    r = DuplicateHandle(proc, media_event_handle, proc, &ctx->event[0],
                        0, 0, DUPLICATE_SAME_ACCESS);
    if (!r) {
        av_log(avctx, AV_LOG_ERROR, "Could not duplicate media event handle.\n");
        goto error;
    }

    r = IMediaControl_Run(control);
    if (r == S_FALSE) {
        /* graph is still cueing; wait for it to settle into a final state */
        OAFilterState pfs;
        r = IMediaControl_GetState(control, 0, &pfs);
    }
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not run graph (sometimes caused by a device already in use by other application)\n");
        goto error;
    }

    ret = 0;

error:
    if (devenum)
        ICreateDevEnum_Release(devenum);

    if (ret < 0)
        dshow_read_close(avctx);

    return ret;
}
/* image2 demuxer header reader.
 * Creates a single stream, applies the optional pixel_format / video_size /
 * framerate options, and (for file-sequence input, i.e. non-pipe) scans the
 * directory for the first/last image index to derive the duration.
 * Returns 0 on success or a negative AVERROR code.
 *
 * Fix: the size/framerate error logs previously passed the private
 * VideoDemuxData pointer to av_log(), which expects an AVClass-headed
 * context; they now log through the AVFormatContext (s1). */
static int img_read_header(AVFormatContext *s1)
{
    VideoDemuxData *s = s1->priv_data;
    int first_index, last_index, ret = 0;
    int width = 0, height = 0;
    AVStream *st;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
    AVRational framerate;

    /* codec parameters are only known once the first image is read */
    s1->ctx_flags |= AVFMTCTX_NOHEADER;

    st = avformat_new_stream(s1, NULL);
    if (!st) {
        return AVERROR(ENOMEM);
    }

    if (s->pixel_format &&
        (pix_fmt = av_get_pix_fmt(s->pixel_format)) == AV_PIX_FMT_NONE) {
        av_log(s1, AV_LOG_ERROR, "No such pixel format: %s.\n",
               s->pixel_format);
        return AVERROR(EINVAL);
    }
    if (s->video_size &&
        (ret = av_parse_video_size(&width, &height, s->video_size)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse video size: %s.\n",
               s->video_size);
        return ret;
    }
    if ((ret = av_parse_video_rate(&framerate, s->framerate)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               s->framerate);
        return ret;
    }

    av_strlcpy(s->path, s1->filename, sizeof(s->path));
    s->img_number = 0;
    s->img_count = 0;

    /* find format */
    if (s1->iformat->flags & AVFMT_NOFILE)
        s->is_pipe = 0;
    else {
        s->is_pipe = 1;
        st->need_parsing = AVSTREAM_PARSE_FULL;
    }

    avpriv_set_pts_info(st, 60, framerate.den, framerate.num);

    if (width && height) {
        st->codec->width  = width;
        st->codec->height = height;
    }

    if (!s->is_pipe) {
        if (find_image_range(&first_index, &last_index, s->path,
                             FFMAX(s->start_number, 5)) < 0)
            return AVERROR(ENOENT);
        s->img_first  = first_index;
        s->img_last   = last_index;
        s->img_number = s->start_number != 1 ? s->start_number : first_index;
        /* compute duration */
        st->start_time = 0;
        st->duration   = last_index - first_index + 1;
    }

    if (s1->video_codec_id) {
        st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
        st->codec->codec_id   = s1->video_codec_id;
    } else if (s1->audio_codec_id) {
        st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
        st->codec->codec_id   = s1->audio_codec_id;
    } else {
        st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
        st->codec->codec_id   = ff_guess_image2_codec(s->path);
    }
    if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO &&
        pix_fmt != AV_PIX_FMT_NONE)
        st->codec->pix_fmt = pix_fmt;

    return 0;
}
/* DirectShow grab device header reader (earlier variant without the
 * media-event plumbing).
 * Validates the device string and options, initializes COM, builds the
 * capture filter graph, opens the requested video/audio devices and starts
 * the graph.  Listing modes print devices/options and return AVERROR_EXIT.
 * On failure everything opened so far is torn down via dshow_read_close().
 * Returns 0 on success or a negative AVERROR code. */
static int dshow_read_header(AVFormatContext *avctx)
{
    struct dshow_ctx *ctx = avctx->priv_data;
    IGraphBuilder *graph = NULL;
    ICreateDevEnum *devenum = NULL;
    IMediaControl *control = NULL;
    int ret = AVERROR(EIO);
    int r;

    if (!ctx->list_devices && !parse_device_name(avctx)) {
        av_log(avctx, AV_LOG_ERROR, "Malformed dshow input string.\n");
        goto error;
    }

    ctx->video_codec_id = avctx->video_codec_id ? avctx->video_codec_id
                                                : AV_CODEC_ID_RAWVIDEO;
    if (ctx->pixel_format != AV_PIX_FMT_NONE) {
        /* a forced pixel format only makes sense for uncompressed capture */
        if (ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO) {
            av_log(avctx, AV_LOG_ERROR, "Pixel format may only be set when "
                              "video codec is not set or set to rawvideo\n");
            ret = AVERROR(EINVAL);
            goto error;
        }
    }
    if (ctx->framerate) {
        r = av_parse_video_rate(&ctx->requested_framerate, ctx->framerate);
        if (r < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
                   ctx->framerate);
            goto error;
        }
    }

    CoInitialize(0);

    r = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                         &IID_IGraphBuilder, (void **) &graph);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not create capture graph.\n");
        goto error;
    }
    ctx->graph = graph;

    r = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                         &IID_ICreateDevEnum, (void **) &devenum);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enumerate system devices.\n");
        goto error;
    }

    if (ctx->list_devices) {
        /* listing mode: print and exit without opening anything */
        av_log(avctx, AV_LOG_INFO, "DirectShow video devices\n");
        dshow_cycle_devices(avctx, devenum, VideoDevice, NULL);
        av_log(avctx, AV_LOG_INFO, "DirectShow audio devices\n");
        dshow_cycle_devices(avctx, devenum, AudioDevice, NULL);
        ret = AVERROR_EXIT;
        goto error;
    }

    if (ctx->list_options) {
        if (ctx->device_name[VideoDevice])
            dshow_list_device_options(avctx, devenum, VideoDevice);
        if (ctx->device_name[AudioDevice])
            dshow_list_device_options(avctx, devenum, AudioDevice);
        ret = AVERROR_EXIT;
        goto error;
    }

    if (ctx->device_name[VideoDevice]) {
        if ((r = dshow_open_device(avctx, devenum, VideoDevice)) < 0 ||
            (r = dshow_add_device(avctx, VideoDevice)) < 0) {
            ret = r;
            goto error;
        }
    }
    if (ctx->device_name[AudioDevice]) {
        if ((r = dshow_open_device(avctx, devenum, AudioDevice)) < 0 ||
            (r = dshow_add_device(avctx, AudioDevice)) < 0) {
            ret = r;
            goto error;
        }
    }

    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if (!ctx->mutex) {
        av_log(avctx, AV_LOG_ERROR, "Could not create Mutex\n");
        goto error;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if (!ctx->event) {
        av_log(avctx, AV_LOG_ERROR, "Could not create Event\n");
        goto error;
    }

    r = IGraphBuilder_QueryInterface(graph, &IID_IMediaControl,
                                     (void **) &control);
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not get media control.\n");
        goto error;
    }
    ctx->control = control;

    r = IMediaControl_Run(control);
    if (r == S_FALSE) {
        /* graph is still cueing; wait for it to settle into a final state */
        OAFilterState pfs;
        r = IMediaControl_GetState(control, 0, &pfs);
    }
    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not run filter\n");
        goto error;
    }

    ret = 0;

error:
    if (ret < 0)
        dshow_read_close(avctx);

    if (devenum)
        ICreateDevEnum_Release(devenum);

    return ret;
}
/**
 * Demuxer read_header callback for the Video for Windows (VfW) capture device.
 *
 * Special-cases the filename "list" to enumerate the first ten capture
 * drivers and then fail with EIO.  Otherwise connects to the driver,
 * negotiates the capture format (optionally overriding the frame size),
 * configures the sequence parameters and starts a no-file capture.
 *
 * @param s demuxer context; priv_data is a struct vfw_ctx
 * @return 0 on success, negative AVERROR code on failure
 */
static int vfw_read_header(AVFormatContext *s)
{
    struct vfw_ctx *ctx = s->priv_data;
    AVCodecContext *codec;
    AVStream *st;
    int devnum;
    int bisize;
    BITMAPINFO *bi = NULL;
    CAPTUREPARMS cparms;
    DWORD biCompression;
    WORD biBitCount;
    int ret;
    AVRational framerate_q;

    /* "list" mode: enumerate drivers 0..9 and abort; EIO signals that no
     * actual capture was opened. */
    if (!strcmp(s->filename, "list")) {
        for (devnum = 0; devnum <= 9; devnum++) {
            char driver_name[256];
            char driver_ver[256];
            ret = capGetDriverDescription(devnum,
                                          driver_name, sizeof(driver_name),
                                          driver_ver, sizeof(driver_ver));
            if (ret) {
                av_log(s, AV_LOG_INFO, "Driver %d\n", devnum);
                av_log(s, AV_LOG_INFO, " %s\n", driver_name);
                av_log(s, AV_LOG_INFO, " %s\n", driver_ver);
            }
        }
        return AVERROR(EIO);
    }

    /* Message-only window: used purely as a target for the capture driver. */
    ctx->hwnd = capCreateCaptureWindow(NULL, 0, 0, 0, 0, 0, HWND_MESSAGE, 0);
    if(!ctx->hwnd) {
        av_log(s, AV_LOG_ERROR, "Could not create capture window.\n");
        return AVERROR(EIO);
    }

    /* If atoi fails, devnum==0 and the default device is used */
    devnum = atoi(s->filename);

    ret = SendMessage(ctx->hwnd, WM_CAP_DRIVER_CONNECT, devnum, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not connect to device.\n");
        /* Window must be destroyed by hand here: vfw_read_close would try
         * to disconnect a driver that was never connected. */
        DestroyWindow(ctx->hwnd);
        return AVERROR(ENODEV);
    }

    /* Disable on-screen rendering; frames are delivered via callback only. */
    SendMessage(ctx->hwnd, WM_CAP_SET_OVERLAY, 0, 0);
    SendMessage(ctx->hwnd, WM_CAP_SET_PREVIEW, 0, 0);

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0,
                      (LPARAM) videostream_cb);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n");
        goto fail;
    }

    /* Stash the AVFormatContext so the callback can reach it. */
    SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) s);

    st = avformat_new_stream(s, NULL);
    if(!st) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }

    /* Set video format: first query the required BITMAPINFO size, then
     * fetch the actual structure. */
    bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0);
    if(!bisize)
        goto fail;
    bi = av_malloc(bisize);
    if(!bi) {
        vfw_read_close(s);
        return AVERROR(ENOMEM);
    }
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret)
        goto fail;

    dump_bih(s, &bi->bmiHeader);

    ret = av_parse_video_rate(&framerate_q, ctx->framerate);
    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               ctx->framerate);
        goto fail;
    }

    /* Optional user override of the capture dimensions. */
    if (ctx->video_size) {
        ret = av_parse_video_size(&bi->bmiHeader.biWidth,
                                  &bi->bmiHeader.biHeight, ctx->video_size);
        if (ret < 0) {
            av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
            goto fail;
        }
    }

    if (0) {
        /* For testing yet unsupported compressions
         * Copy these values from user-supplied verbose information */
        bi->bmiHeader.biWidth       = 320;
        bi->bmiHeader.biHeight      = 240;
        bi->bmiHeader.biPlanes      = 1;
        bi->bmiHeader.biBitCount    = 12;
        bi->bmiHeader.biCompression = MKTAG('I','4','2','0');
        bi->bmiHeader.biSizeImage   = 115200;
        dump_bih(s, &bi->bmiHeader);
    }

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n");
        goto fail;
    }

    /* Cache the negotiated values: the driver may have adjusted them. */
    biCompression = bi->bmiHeader.biCompression;
    biBitCount    = bi->bmiHeader.biBitCount;

    /* Set sequence setup */
    ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail;

    dump_captureparms(s, &cparms);

    cparms.fYield = 1; // Spawn a background thread
    /* Convert frames/sec to microseconds per frame.
     * NOTE(review): framerate_q.den*1000000 is evaluated in int arithmetic —
     * could overflow for unusual rationals; confirm acceptable input range. */
    cparms.dwRequestMicroSecPerFrame =
        (framerate_q.den*1000000) / framerate_q.num;
    cparms.fAbortLeftMouse  = 0;
    cparms.fAbortRightMouse = 0;
    cparms.fCaptureAudio    = 0;
    cparms.vKeyAbort        = 0;

    ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms),
                      (LPARAM) &cparms);
    if(!ret)
        goto fail;

    codec = st->codec;
    codec->time_base  = av_inv_q(framerate_q);
    codec->codec_type = AVMEDIA_TYPE_VIDEO;
    codec->width      = bi->bmiHeader.biWidth;
    codec->height     = bi->bmiHeader.biHeight;
    codec->pix_fmt    = vfw_pixfmt(biCompression, biBitCount);
    if(codec->pix_fmt == AV_PIX_FMT_NONE) {
        /* Not a raw format we recognize: try to map it to a codec ID. */
        codec->codec_id = vfw_codecid(biCompression);
        if(codec->codec_id == AV_CODEC_ID_NONE) {
            av_log(s, AV_LOG_ERROR, "Unknown compression type. "
                   "Please report verbose (-v 9) debug information.\n");
            vfw_read_close(s);
            return AVERROR_PATCHWELCOME;
        }
        codec->bits_per_coded_sample = biBitCount;
    } else {
        codec->codec_id = AV_CODEC_ID_RAWVIDEO;
        if(biCompression == BI_RGB) {
            codec->bits_per_coded_sample = biBitCount;
            /* Uncompressed RGB DIBs are stored bottom-up; tell the rawvideo
             * decoder via extradata (9 bytes: "BottomUp" + NUL). */
            codec->extradata = av_malloc(9 + AV_INPUT_BUFFER_PADDING_SIZE);
            if (codec->extradata) {
                codec->extradata_size = 9;
                memcpy(codec->extradata, "BottomUp", 9);
            }
        }
    }

    av_freep(&bi);

    avpriv_set_pts_info(st, 32, 1, 1000);  /* 32-bit pts in milliseconds */

    /* Synchronization primitives shared with the capture callback. */
    ctx->mutex = CreateMutex(NULL, 0, NULL);
    if(!ctx->mutex) {
        av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n" );
        goto fail;
    }
    ctx->event = CreateEvent(NULL, 1, 0, NULL);
    if(!ctx->event) {
        av_log(s, AV_LOG_ERROR, "Could not create Event.\n" );
        goto fail;
    }

    /* Start capturing without writing to a file (frames go to callback). */
    ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0);
    if(!ret) {
        av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n" );
        goto fail;
    }

    return 0;

fail:
    av_freep(&bi);
    vfw_read_close(s);
    return AVERROR(EIO);
}
/**
 * Initialize the x11 grab device demuxer (public device demuxer API).
 *
 * Parses the display/offset spec from the filename, opens the X display,
 * sets up an XImage (shared-memory backed when the MIT-SHM extension is
 * available) and derives the stream's pixel format from the image's
 * bit depth and channel masks.
 *
 * @param s1 Context from avformat core
 * @return <ul>
 *          <li>AVERROR(ENOMEM) no memory left</li>
 *          <li>AVERROR(EIO) other failure case</li>
 *          <li>0 success</li>
 *         </ul>
 */
static int x11grab_read_header(AVFormatContext *s1)
{
    struct x11_grab *x11grab = s1->priv_data;
    Display *dpy;
    AVStream *st = NULL;
    enum PixelFormat input_pixfmt;
    XImage *image;
    int x_off = 0;
    int y_off = 0;
    int screen;
    int use_shm;
    char *dpyname, *offset;
    int ret = 0;
    AVRational framerate;

    /* Filename format: "display+x,y[nomouse]"; the '+' part is optional.
     * NOTE(review): av_strdup result is not NULL-checked before strchr —
     * OOM here would dereference NULL; worth fixing upstream. */
    dpyname = av_strdup(s1->filename);
    offset = strchr(dpyname, '+');
    if (offset) {
        sscanf(offset, "%d,%d", &x_off, &y_off);
        x11grab->draw_mouse = !strstr(offset, "nomouse");
        *offset= 0;  /* truncate so dpyname is just the display string */
    }

    if ((ret = av_parse_video_size(&x11grab->width, &x11grab->height,
                                   x11grab->video_size)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Couldn't parse video size.\n");
        goto out;
    }
    if ((ret = av_parse_video_rate(&framerate, x11grab->framerate)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               x11grab->framerate);
        goto out;
    }
    av_log(s1, AV_LOG_INFO,
           "device: %s -> display: %s x: %d y: %d width: %d height: %d\n",
           s1->filename, dpyname, x_off, y_off, x11grab->width, x11grab->height);

    dpy = XOpenDisplay(dpyname);
    av_freep(&dpyname);  /* no longer needed whether open succeeded or not */
    if(!dpy) {
        av_log(s1, AV_LOG_ERROR, "Could not open X display.\n");
        ret = AVERROR(EIO);
        goto out;
    }

    st = avformat_new_stream(s1, NULL);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto out;
    }
    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    screen = DefaultScreen(dpy);

    /* Center the grab region on the current pointer position, clamped to
     * the screen bounds. */
    if (x11grab->follow_mouse) {
        int screen_w, screen_h;
        Window w;

        screen_w = DisplayWidth(dpy, screen);
        screen_h = DisplayHeight(dpy, screen);
        /* &ret is reused as a dummy output for the unneeded coordinates. */
        XQueryPointer(dpy, RootWindow(dpy, screen), &w, &w, &x_off, &y_off,
                      &ret, &ret, &ret);
        x_off -= x11grab->width / 2;
        y_off -= x11grab->height / 2;
        x_off = FFMIN(FFMAX(x_off, 0), screen_w - x11grab->width);
        y_off = FFMIN(FFMAX(y_off, 0), screen_h - x11grab->height);
        av_log(s1, AV_LOG_INFO,
               "followmouse is enabled, resetting grabbing region to x: %d y: %d\n",
               x_off, y_off);
    }

    use_shm = XShmQueryExtension(dpy);
    av_log(s1, AV_LOG_INFO, "shared memory extension%s found\n",
           use_shm ? "" : " not");

    if(use_shm) {
        /* Shared-memory path: XImage pixels live in a SysV SHM segment so
         * each frame grab avoids a round trip through the X server. */
        int scr = XDefaultScreen(dpy);
        image = XShmCreateImage(dpy,
                                DefaultVisual(dpy, scr),
                                DefaultDepth(dpy, scr),
                                ZPixmap,
                                NULL,
                                &x11grab->shminfo,
                                x11grab->width, x11grab->height);
        x11grab->shminfo.shmid = shmget(IPC_PRIVATE,
                                        image->bytes_per_line * image->height,
                                        IPC_CREAT|0777);
        if (x11grab->shminfo.shmid == -1) {
            av_log(s1, AV_LOG_ERROR, "Fatal: Can't get shared memory!\n");
            ret = AVERROR(ENOMEM);
            goto out;
        }
        x11grab->shminfo.shmaddr = image->data =
            shmat(x11grab->shminfo.shmid, 0, 0);
        x11grab->shminfo.readOnly = False;

        if (!XShmAttach(dpy, &x11grab->shminfo)) {
            av_log(s1, AV_LOG_ERROR, "Fatal: Failed to attach shared memory!\n");
            /* needs some better error subroutine :) */
            ret = AVERROR(EIO);
            goto out;
        }
    } else {
        image = XGetImage(dpy, RootWindow(dpy, screen),
                          x_off,y_off,
                          x11grab->width, x11grab->height,
                          AllPlanes, ZPixmap);
    }

    /* Map the X image's depth + channel masks to an FFmpeg pixel format. */
    switch (image->bits_per_pixel) {
    case 8:
        av_log (s1, AV_LOG_DEBUG, "8 bit palette\n");
        input_pixfmt = PIX_FMT_PAL8;
        break;
    case 16:
        if (       image->red_mask   == 0xf800 &&
                   image->green_mask == 0x07e0 &&
                   image->blue_mask  == 0x001f ) {
            av_log (s1, AV_LOG_DEBUG, "16 bit RGB565\n");
            input_pixfmt = PIX_FMT_RGB565;
        } else if (image->red_mask   == 0x7c00 &&
                   image->green_mask == 0x03e0 &&
                   image->blue_mask  == 0x001f ) {
            av_log(s1, AV_LOG_DEBUG, "16 bit RGB555\n");
            input_pixfmt = PIX_FMT_RGB555;
        } else {
            av_log(s1, AV_LOG_ERROR,
                   "RGB ordering at image depth %i not supported ... aborting\n",
                   image->bits_per_pixel);
            av_log(s1, AV_LOG_ERROR,
                   "color masks: r 0x%.6lx g 0x%.6lx b 0x%.6lx\n",
                   image->red_mask, image->green_mask, image->blue_mask);
            ret = AVERROR(EIO);
            goto out;
        }
        break;
    case 24:
        if (        image->red_mask   == 0xff0000 &&
                    image->green_mask == 0x00ff00 &&
                    image->blue_mask  == 0x0000ff ) {
            input_pixfmt = PIX_FMT_BGR24;
        } else if ( image->red_mask   == 0x0000ff &&
                    image->green_mask == 0x00ff00 &&
                    image->blue_mask  == 0xff0000 ) {
            input_pixfmt = PIX_FMT_RGB24;
        } else {
            av_log(s1, AV_LOG_ERROR,
                   "rgb ordering at image depth %i not supported ... aborting\n",
                   image->bits_per_pixel);
            av_log(s1, AV_LOG_ERROR,
                   "color masks: r 0x%.6lx g 0x%.6lx b 0x%.6lx\n",
                   image->red_mask, image->green_mask, image->blue_mask);
            ret = AVERROR(EIO);
            goto out;
        }
        break;
    case 32:
        input_pixfmt = PIX_FMT_0RGB32;
        break;
    default:
        av_log(s1, AV_LOG_ERROR,
               "image depth %i not supported ... aborting\n",
               image->bits_per_pixel);
        ret = AVERROR(EINVAL);
        goto out;
    }

    x11grab->frame_size = x11grab->width * x11grab->height *
                          image->bits_per_pixel/8;
    x11grab->dpy        = dpy;
    /* time_base is the inverse of the frame rate. */
    x11grab->time_base  = (AVRational) { framerate.den, framerate.num };
    x11grab->time_frame = av_gettime() / av_q2d(x11grab->time_base);
    x11grab->x_off      = x_off;
    x11grab->y_off      = y_off;
    x11grab->image      = image;
    x11grab->use_shm    = use_shm;

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id   = CODEC_ID_RAWVIDEO;
    st->codec->width      = x11grab->width;
    st->codec->height     = x11grab->height;
    st->codec->pix_fmt    = input_pixfmt;
    st->codec->time_base  = x11grab->time_base;
    /* bytes per frame * frames per second * 8 = bits per second */
    st->codec->bit_rate   = x11grab->frame_size * 1/av_q2d(x11grab->time_base) * 8;

out:
    return ret;
}
/**
 * Parse one command line of a <Stream>...</Stream> section of the ffserver
 * configuration file.
 *
 * On "<Stream" a new FFServerStream is allocated and returned via *pstream;
 * on "</Stream>" the accumulated audio/video settings are committed to the
 * stream and *pstream is reset to NULL.  All other commands modify either
 * the current stream or the pending config->{audio,video}_{conf,opts}
 * dictionaries.  ERROR()/WARNING() report problems but parsing continues.
 *
 * @param config   global parser state (codec ids, pending option dicts, ...)
 * @param cmd      the command token at the start of the line
 * @param p        in/out pointer to the rest of the line
 * @param line_num line number, for diagnostics
 * @param pstream  in/out current stream being built (never NULL itself)
 * @return 0 on success (including reported config errors),
 *         AVERROR(ENOMEM) on allocation failure
 */
static int ffserver_parse_config_stream(FFServerConfig *config, const char *cmd,
                                        const char **p, int line_num,
                                        FFServerStream **pstream)
{
    char arg[1024], arg2[1024];
    FFServerStream *stream;
    int val;

    av_assert0(pstream);
    stream = *pstream;

    if (!av_strcasecmp(cmd, "<Stream")) {
        char *q;
        FFServerStream *s;
        stream = av_mallocz(sizeof(FFServerStream));
        if (!stream)
            return AVERROR(ENOMEM);
        config->dummy_ctx = avcodec_alloc_context3(NULL);
        if (!config->dummy_ctx) {
            av_free(stream);
            return AVERROR(ENOMEM);
        }
        ffserver_get_arg(stream->filename, sizeof(stream->filename), p);
        /* strip the trailing '>' of "<Stream name>" */
        q = strrchr(stream->filename, '>');
        if (q)
            *q = '\0';

        for (s = config->first_stream; s; s = s->next) {
            if (!strcmp(stream->filename, s->filename))
                ERROR("Stream '%s' already registered\n", s->filename);
        }

        /* Default codec ids come from the format guessed off the name. */
        stream->fmt = ffserver_guess_format(NULL, stream->filename, NULL);
        if (stream->fmt) {
            config->audio_id = stream->fmt->audio_codec;
            config->video_id = stream->fmt->video_codec;
        } else {
            config->audio_id = AV_CODEC_ID_NONE;
            config->video_id = AV_CODEC_ID_NONE;
        }
        *pstream = stream;
        return 0;
    }

    av_assert0(stream);
    if (!av_strcasecmp(cmd, "Feed")) {
        FFServerStream *sfeed;
        ffserver_get_arg(arg, sizeof(arg), p);
        sfeed = config->first_feed;
        while (sfeed) {
            if (!strcmp(sfeed->filename, arg))
                break;
            sfeed = sfeed->next_feed;
        }
        if (!sfeed)
            ERROR("Feed with name '%s' for stream '%s' is not defined\n", arg,
                  stream->filename);
        else
            stream->feed = sfeed;
    } else if (!av_strcasecmp(cmd, "Format")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (!strcmp(arg, "status")) {
            stream->stream_type = STREAM_TYPE_STATUS;
            stream->fmt = NULL;
        } else {
            stream->stream_type = STREAM_TYPE_LIVE;
            /* JPEG cannot be used here, so use single frame MJPEG */
            if (!strcmp(arg, "jpeg"))
                strcpy(arg, "mjpeg");
            stream->fmt = ffserver_guess_format(arg, NULL, NULL);
            if (!stream->fmt)
                ERROR("Unknown Format: %s\n", arg);
        }
        if (stream->fmt) {
            config->audio_id = stream->fmt->audio_codec;
            config->video_id = stream->fmt->video_codec;
        }
    } else if (!av_strcasecmp(cmd, "InputFormat")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->ifmt = av_find_input_format(arg);
        if (!stream->ifmt)
            ERROR("Unknown input format: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "FaviconURL")) {
        if (stream->stream_type == STREAM_TYPE_STATUS)
            ffserver_get_arg(stream->feed_filename,
                             sizeof(stream->feed_filename), p);
        else
            ERROR("FaviconURL only permitted for status streams\n");
    } else if (!av_strcasecmp(cmd, "Author")    ||
               !av_strcasecmp(cmd, "Comment")   ||
               !av_strcasecmp(cmd, "Copyright") ||
               !av_strcasecmp(cmd, "Title")) {
        char key[32];
        int i;
        ffserver_get_arg(arg, sizeof(arg), p);
        /* lowercase the command name to use it as metadata key */
        for (i = 0; i < strlen(cmd); i++)
            key[i] = av_tolower(cmd[i]);
        key[i] = 0;
        WARNING("'%s' option in configuration file is deprecated, "
                "use 'Metadata %s VALUE' instead\n", cmd, key);
        if (av_dict_set(&stream->metadata, key, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "Metadata")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_get_arg(arg2, sizeof(arg2), p);
        if (av_dict_set(&stream->metadata, arg, arg2, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "Preroll")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->prebuffer = atof(arg) * 1000;  /* seconds -> milliseconds */
    } else if (!av_strcasecmp(cmd, "StartSendOnKey")) {
        stream->send_on_key = 1;
    } else if (!av_strcasecmp(cmd, "AudioCodec")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->audio_id = opt_codec(arg, AVMEDIA_TYPE_AUDIO);
        if (config->audio_id == AV_CODEC_ID_NONE)
            ERROR("Unknown AudioCodec: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "VideoCodec")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        config->video_id = opt_codec(arg, AVMEDIA_TYPE_VIDEO);
        if (config->video_id == AV_CODEC_ID_NONE)
            ERROR("Unknown VideoCodec: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "MaxTime")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        stream->max_time = atof(arg) * 1000;  /* seconds -> milliseconds */
    } else if (!av_strcasecmp(cmd, "AudioBitRate")) {
        float f;
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_float_param(&f, arg, 1000, 0, FLT_MAX, config, line_num,
                                 "Invalid %s: %s\n", cmd, arg);
        if (av_dict_set_int(&config->audio_conf, cmd, lrintf(f), 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "AudioChannels")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, 1, 8, config, line_num,
                               "Invalid %s: %s, valid range is 1-8.", cmd, arg);
        if (av_dict_set(&config->audio_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "AudioSampleRate")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, 0, INT_MAX, config, line_num,
                               "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->audio_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoBitRateRange")) {
        int minrate, maxrate;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (sscanf(arg, "%d-%d", &minrate, &maxrate) == 2) {
            if (av_dict_set_int(&config->video_conf,
                                "VideoBitRateRangeMin", minrate, 0) < 0 ||
                av_dict_set_int(&config->video_conf,
                                "VideoBitRateRangeMax", maxrate, 0) < 0)
                goto nomem;
        } else
            ERROR("Incorrect format for VideoBitRateRange -- should be "
                  "<min>-<max>: %s\n", arg);
    } else if (!av_strcasecmp(cmd, "Debug")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, INT_MIN, INT_MAX, config,
                               line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "Strict")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, INT_MIN, INT_MAX, config,
                               line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoBufferSize")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        /* value is given in KiB (scale factor 8*1024 -> bits) */
        ffserver_set_int_param(NULL, arg, 8*1024, 0, INT_MAX, config,
                               line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoBitRateTolerance")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 1000, INT_MIN, INT_MAX, config,
                               line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoBitRate")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 1000, 0, INT_MAX, config, line_num,
                               "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoSize")) {
        int ret, w, h;
        ffserver_get_arg(arg, sizeof(arg), p);
        ret = av_parse_video_size(&w, &h, arg);
        if (ret < 0)
            ERROR("Invalid video size '%s'\n", arg);
        else if ((w % 16) || (h % 16))
            ERROR("Image size must be a multiple of 16\n");
        if (av_dict_set_int(&config->video_conf, "VideoSizeWidth", w, 0) < 0 ||
            av_dict_set_int(&config->video_conf, "VideoSizeHeight", h, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoFrameRate")) {
        AVRational frame_rate;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (av_parse_video_rate(&frame_rate, arg) < 0) {
            ERROR("Incorrect frame rate: %s\n", arg);
        } else {
            if (av_dict_set_int(&config->video_conf, "VideoFrameRateNum",
                                frame_rate.num, 0) < 0 ||
                av_dict_set_int(&config->video_conf, "VideoFrameRateDen",
                                frame_rate.den, 0) < 0)
                goto nomem;
        }
    } else if (!av_strcasecmp(cmd, "PixelFormat")) {
        enum AVPixelFormat pix_fmt;
        ffserver_get_arg(arg, sizeof(arg), p);
        pix_fmt = av_get_pix_fmt(arg);
        if (pix_fmt == AV_PIX_FMT_NONE)
            ERROR("Unknown pixel format: %s\n", arg);
        if (av_dict_set_int(&config->video_conf, cmd, pix_fmt, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoGopSize")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, INT_MIN, INT_MAX, config,
                               line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoIntraOnly")) {
        if (av_dict_set(&config->video_conf, cmd, "1", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoHighQuality")) {
        if (av_dict_set(&config->video_conf, cmd, "", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "Video4MotionVector")) {
        if (av_dict_set(&config->video_conf, cmd, "", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "AVOptionVideo") ||
               !av_strcasecmp(cmd, "AVOptionAudio")) {
        int ret;
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_get_arg(arg2, sizeof(arg2), p);
        if (!av_strcasecmp(cmd, "AVOptionVideo"))
            ret = ffserver_save_avoption(arg, arg2, &config->video_opts,
                                         AV_OPT_FLAG_VIDEO_PARAM, config,
                                         line_num);
        else
            ret = ffserver_save_avoption(arg, arg2, &config->audio_opts,
                                         AV_OPT_FLAG_AUDIO_PARAM, config,
                                         line_num);
        if (ret < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "AVPresetVideo") ||
               !av_strcasecmp(cmd, "AVPresetAudio")) {
        char **preset = NULL;
        ffserver_get_arg(arg, sizeof(arg), p);
        if (!av_strcasecmp(cmd, "AVPresetVideo")) {
            preset = &config->video_preset;
            ffserver_opt_preset(arg, NULL, 0, NULL, &config->video_id);
        } else {
            preset = &config->audio_preset;
            ffserver_opt_preset(arg, NULL, 0, &config->audio_id, NULL);
        }
        *preset = av_strdup(arg);
        /* FIX: the check used to test 'preset' (the address of a struct
         * member, never NULL) instead of '*preset', so an OOM from
         * av_strdup() went undetected. */
        if (!*preset)
            return AVERROR(ENOMEM);
    } else if (!av_strcasecmp(cmd, "VideoTag")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (strlen(arg) == 4) {
            /* FIX: the parsed tag in 'arg' must be stored, not the literal
             * string "arg" (which made every VideoTag directive store the
             * three characters a-r-g). */
            if (av_dict_set(&config->video_conf, "VideoTag", arg, 0) < 0)
                goto nomem;
        }
    } else if (!av_strcasecmp(cmd, "BitExact")) {
        if (av_dict_set(&config->video_conf, cmd, "", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "DctFastint")) {
        if (av_dict_set(&config->video_conf, cmd, "", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "IdctSimple")) {
        if (av_dict_set(&config->video_conf, cmd, "", 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "Qscale")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoQDiff")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, 1, 31, config, line_num,
                               "%s out of range\n", cmd);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoQMax")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, 1, 31, config, line_num,
                               "%s out of range\n", cmd);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "VideoQMin")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(NULL, arg, 0, 1, 31, config, line_num,
                               "%s out of range\n", cmd);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "LumiMask")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_float_param(NULL, arg, 0, -FLT_MAX, FLT_MAX, config,
                                 line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "DarkMask")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_float_param(NULL, arg, 0, -FLT_MAX, FLT_MAX, config,
                                 line_num, "Invalid %s: %s", cmd, arg);
        if (av_dict_set(&config->video_conf, cmd, arg, 0) < 0)
            goto nomem;
    } else if (!av_strcasecmp(cmd, "NoVideo")) {
        config->video_id = AV_CODEC_ID_NONE;
    } else if (!av_strcasecmp(cmd, "NoAudio")) {
        config->audio_id = AV_CODEC_ID_NONE;
    } else if (!av_strcasecmp(cmd, "ACL")) {
        ffserver_parse_acl_row(stream, NULL, NULL, *p, config->filename,
                               line_num);
    } else if (!av_strcasecmp(cmd, "DynamicACL")) {
        ffserver_get_arg(stream->dynamic_acl, sizeof(stream->dynamic_acl), p);
    } else if (!av_strcasecmp(cmd, "RTSPOption")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        av_freep(&stream->rtsp_option);
        stream->rtsp_option = av_strdup(arg);
    } else if (!av_strcasecmp(cmd, "MulticastAddress")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        if (resolve_host(&stream->multicast_ip, arg))
            ERROR("Invalid host/IP address: %s\n", arg);
        stream->is_multicast = 1;
        stream->loop = 1; /* default is looping */
    } else if (!av_strcasecmp(cmd, "MulticastPort")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(&val, arg, 0, 1, 65535, config, line_num,
                               "Invalid MulticastPort: %s\n", arg);
        stream->multicast_port = val;
    } else if (!av_strcasecmp(cmd, "MulticastTTL")) {
        ffserver_get_arg(arg, sizeof(arg), p);
        ffserver_set_int_param(&val, arg, 0, INT_MIN, INT_MAX, config,
                               line_num, "Invalid MulticastTTL: %s\n", arg);
        stream->multicast_ttl = val;
    } else if (!av_strcasecmp(cmd, "NoLoop")) {
        stream->loop = 0;
    } else if (!av_strcasecmp(cmd, "</Stream>")) {
        /* End of section: build the encoder contexts from everything
         * accumulated in config, then reset the per-stream parser state. */
        if (stream->feed && stream->fmt && strcmp(stream->fmt->name, "ffm")) {
            if (config->audio_id != AV_CODEC_ID_NONE) {
                AVCodecContext *audio_enc =
                    avcodec_alloc_context3(avcodec_find_encoder(config->audio_id));
                if (config->audio_preset &&
                    ffserver_opt_preset(arg, audio_enc,
                                        AV_OPT_FLAG_AUDIO_PARAM |
                                        AV_OPT_FLAG_ENCODING_PARAM,
                                        NULL, NULL) < 0)
                    ERROR("Could not apply preset '%s'\n", arg);
                ffserver_apply_stream_config(audio_enc, config->audio_conf,
                                             &config->audio_opts);
                add_codec(stream, audio_enc);
            }
            if (config->video_id != AV_CODEC_ID_NONE) {
                AVCodecContext *video_enc =
                    avcodec_alloc_context3(avcodec_find_encoder(config->video_id));
                if (config->video_preset &&
                    ffserver_opt_preset(arg, video_enc,
                                        AV_OPT_FLAG_VIDEO_PARAM |
                                        AV_OPT_FLAG_ENCODING_PARAM,
                                        NULL, NULL) < 0)
                    ERROR("Could not apply preset '%s'\n", arg);
                ffserver_apply_stream_config(video_enc, config->video_conf,
                                             &config->video_opts);
                add_codec(stream, video_enc);
            }
        }
        av_dict_free(&config->video_opts);
        av_dict_free(&config->video_conf);
        av_dict_free(&config->audio_opts);
        av_dict_free(&config->audio_conf);
        av_freep(&config->video_preset);
        av_freep(&config->audio_preset);
        avcodec_free_context(&config->dummy_ctx);
        *pstream = NULL;
    } else if (!av_strcasecmp(cmd, "File") ||
               !av_strcasecmp(cmd, "ReadOnlyFile")) {
        ffserver_get_arg(stream->feed_filename, sizeof(stream->feed_filename),
                         p);
    } else {
        ERROR("Invalid entry '%s' inside <Stream></Stream>\n", cmd);
    }
    return 0;

nomem:
    /* Allocation failure: release all pending per-stream parser state so
     * the caller can abort cleanly. */
    av_log(NULL, AV_LOG_ERROR, "Out of memory. Aborting.\n");
    av_dict_free(&config->video_opts);
    av_dict_free(&config->video_conf);
    av_dict_free(&config->audio_opts);
    av_dict_free(&config->audio_conf);
    av_freep(&config->video_preset);
    av_freep(&config->audio_preset);
    avcodec_free_context(&config->dummy_ctx);
    return AVERROR(ENOMEM);
}
/**
 * Initialize the x11 grab device demuxer (public device demuxer API).
 *
 * Parses the display/offset spec from the filename, opens the X display,
 * sets up an XImage (shared memory via setup_shm() when available, falling
 * back to XGetImage) and fills in the raw-video stream parameters.
 *
 * @param s1 Context from avformat core
 * @return <ul>
 *          <li>AVERROR(ENOMEM) no memory left</li>
 *          <li>AVERROR(EIO) other failure case</li>
 *          <li>0 success</li>
 *         </ul>
 */
static int x11grab_read_header(AVFormatContext *s1)
{
    X11GrabContext *x11grab = s1->priv_data;
    Display *dpy;
    AVStream *st = NULL;
    XImage *image;
    int x_off = 0, y_off = 0, ret = 0, screen, use_shm;
    char *param, *offset;
    AVRational framerate;

    param = av_strdup(s1->filename);
    if (!param)
        /* FIX: this used to be "goto out" while ret was still 0, silently
         * reporting success on allocation failure. */
        return AVERROR(ENOMEM);

    /* Filename format: "display+x,y[nomouse]"; the '+' part is optional. */
    offset = strchr(param, '+');
    if (offset) {
        sscanf(offset, "%d,%d", &x_off, &y_off);
        x11grab->draw_mouse = !strstr(offset, "nomouse");
        *offset = 0;  /* truncate so param is just the display string */
    }

    ret = av_parse_video_size(&x11grab->width, &x11grab->height,
                              x11grab->video_size);
    if (ret < 0) {
        av_log(s1, AV_LOG_ERROR, "Couldn't parse video size.\n");
        goto out;
    }

    ret = av_parse_video_rate(&framerate, x11grab->framerate);
    if (ret < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
               x11grab->framerate);
        goto out;
    }

    av_log(s1, AV_LOG_INFO,
           "device: %s -> display: %s x: %d y: %d width: %d height: %d\n",
           s1->filename, param, x_off, y_off, x11grab->width, x11grab->height);

    dpy = XOpenDisplay(param);
    if (!dpy) {
        av_log(s1, AV_LOG_ERROR, "Could not open X display.\n");
        ret = AVERROR(EIO);
        goto out;
    }

    st = avformat_new_stream(s1, NULL);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto out;
    }
    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    screen = DefaultScreen(dpy);

    /* Center the grab region on the current pointer position, clamped to
     * the screen bounds. */
    if (x11grab->follow_mouse) {
        int screen_w, screen_h;
        Window w;

        screen_w = DisplayWidth(dpy, screen);
        screen_h = DisplayHeight(dpy, screen);
        /* &ret is reused as a dummy output for the unneeded coordinates. */
        XQueryPointer(dpy, RootWindow(dpy, screen), &w, &w, &x_off, &y_off,
                      &ret, &ret, &ret);
        x_off -= x11grab->width / 2;
        y_off -= x11grab->height / 2;
        x_off = FFMIN(FFMAX(x_off, 0), screen_w - x11grab->width);
        y_off = FFMIN(FFMAX(y_off, 0), screen_h - x11grab->height);
        av_log(s1, AV_LOG_INFO,
               "followmouse is enabled, resetting grabbing region to x: %d y: %d\n",
               x_off, y_off);
    }

    use_shm = XShmQueryExtension(dpy);
    av_log(s1, AV_LOG_INFO, "shared memory extension %sfound\n",
           use_shm ? "" : "not ");

    /* Prefer the MIT-SHM path; degrade gracefully if it cannot be set up. */
    if (use_shm && setup_shm(s1, dpy, &image) < 0) {
        av_log(s1, AV_LOG_WARNING, "Falling back to XGetImage\n");
        use_shm = 0;
    }

    if (!use_shm) {
        image = XGetImage(dpy, RootWindow(dpy, screen), x_off, y_off,
                          x11grab->width, x11grab->height, AllPlanes, ZPixmap);
    }

    if (x11grab->draw_mouse && setup_mouse(dpy, screen) < 0) {
        av_log(s1, AV_LOG_WARNING,
               "XFixes not available, cannot draw the mouse cursor\n");
        x11grab->draw_mouse = 0;
    }

    x11grab->frame_size = x11grab->width * x11grab->height *
                          image->bits_per_pixel / 8;
    x11grab->dpy        = dpy;
    /* time_base is the inverse of the frame rate. */
    x11grab->time_base  = (AVRational) { framerate.den, framerate.num };
    x11grab->time_frame = av_gettime() / av_q2d(x11grab->time_base);
    x11grab->x_off      = x_off;
    x11grab->y_off      = y_off;
    x11grab->image      = image;
    x11grab->use_shm    = use_shm;

    ret = pixfmt_from_image(s1, image, &st->codec->pix_fmt);
    if (ret < 0)
        goto out;

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    st->codec->width      = x11grab->width;
    st->codec->height     = x11grab->height;
    st->codec->time_base  = x11grab->time_base;
    /* bytes per frame * frames per second * 8 = bits per second */
    st->codec->bit_rate   = x11grab->frame_size * 1 /
                            av_q2d(x11grab->time_base) * 8;

out:
    av_free(param);
    return ret;
}
/* raw input */
/**
 * Generic read_header for raw audio/video demuxers.
 *
 * Creates a single stream whose codec id comes from the input format's
 * raw_codec_id.  For audio, sample rate / channels come from the demuxer's
 * private options (with a 16 kHz default for G.722); for raw video, size,
 * pixel format and frame rate are parsed from the private option strings.
 *
 * @param s demuxer context
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_raw_read_header(AVFormatContext *s)
{
    AVStream *st;
    enum AVCodecID id;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    id = s->iformat->raw_codec_id;
    /* Only rawvideo is treated as video; every other raw codec id handled
     * here is audio. */
    if (id == AV_CODEC_ID_RAWVIDEO) {
        st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    } else {
        st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
    }
    st->codec->codec_id = id;

    switch(st->codec->codec_type) {
    case AVMEDIA_TYPE_AUDIO: {
        RawAudioDemuxerContext *s1 = s->priv_data;

        st->codec->channels = 1;

        if (id == AV_CODEC_ID_ADPCM_G722)
            st->codec->sample_rate = 16000;  /* G.722 is always 16 kHz */

        /* User-supplied private options override the defaults. */
        if (s1 && s1->sample_rate)
            st->codec->sample_rate = s1->sample_rate;
        if (s1 && s1->channels)
            st->codec->channels = s1->channels;

        st->codec->bits_per_coded_sample =
            av_get_bits_per_sample(st->codec->codec_id);
        assert(st->codec->bits_per_coded_sample > 0);
        st->codec->block_align =
            st->codec->bits_per_coded_sample*st->codec->channels/8;
        avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);
        break;
        }
    case AVMEDIA_TYPE_VIDEO: {
        FFRawVideoDemuxerContext *s1 = s->priv_data;
        int width = 0, height = 0, ret = 0;
        enum AVPixelFormat pix_fmt;
        AVRational framerate;

        /* video_size is optional; pixel_format and framerate must parse. */
        if (s1->video_size &&
            (ret = av_parse_video_size(&width, &height, s1->video_size)) < 0) {
            av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
            goto fail;
        }
        if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_ERROR, "No such pixel format: %s.\n",
                   s1->pixel_format);
            ret = AVERROR(EINVAL);
            goto fail;
        }
        if ((ret = av_parse_video_rate(&framerate, s1->framerate)) < 0) {
            av_log(s, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
                   s1->framerate);
            goto fail;
        }
        avpriv_set_pts_info(st, 64, framerate.den, framerate.num);
        st->codec->width   = width;
        st->codec->height  = height;
        st->codec->pix_fmt = pix_fmt;
        /* fall into fail with ret == 0 on success: same "return ret". */
fail:
        return ret;
        }
    default:
        return -1;
    }
    return 0;
}
/* raw input */
/**
 * Legacy read_header for raw audio/video demuxers (AVFormatParameters API).
 *
 * Same logic as the modern variant, but additionally honours the deprecated
 * AVFormatParameters overrides when FF_API_FORMAT_PARAMETERS is enabled, and
 * frees the private option strings on the video path before returning.
 *
 * @param s  demuxer context
 * @param ap deprecated parameter struct from avformat core
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_raw_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    AVStream *st;
    enum CodecID id;

    st = av_new_stream(s, 0);
    if (!st)
        return AVERROR(ENOMEM);

    id = s->iformat->value;
    /* Only rawvideo is treated as video; every other raw codec id handled
     * here is audio. */
    if (id == CODEC_ID_RAWVIDEO) {
        st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    } else {
        st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
    }
    st->codec->codec_id = id;

    switch(st->codec->codec_type) {
    case AVMEDIA_TYPE_AUDIO: {
        RawAudioDemuxerContext *s1 = s->priv_data;

#if FF_API_FORMAT_PARAMETERS
        /* Deprecated AVFormatParameters values applied first so the
         * private options below can still override them. */
        if (ap->sample_rate)
            st->codec->sample_rate = ap->sample_rate;
        if (ap->channels)
            st->codec->channels = ap->channels;
        else
            st->codec->channels = 1;
#endif

        if (s1->sample_rate)
            st->codec->sample_rate = s1->sample_rate;
        if (s1->channels)
            st->codec->channels = s1->channels;

        st->codec->bits_per_coded_sample =
            av_get_bits_per_sample(st->codec->codec_id);
        assert(st->codec->bits_per_coded_sample > 0);
        st->codec->block_align =
            st->codec->bits_per_coded_sample*st->codec->channels/8;
        av_set_pts_info(st, 64, 1, st->codec->sample_rate);
        break;
        }
    case AVMEDIA_TYPE_VIDEO: {
        FFRawVideoDemuxerContext *s1 = s->priv_data;
        int width = 0, height = 0, ret = 0;
        enum PixelFormat pix_fmt;
        AVRational framerate;

        /* video_size is optional; pixel_format and framerate must parse. */
        if (s1->video_size &&
            (ret = av_parse_video_size(&width, &height, s1->video_size)) < 0) {
            av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
            goto fail;
        }
        if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == PIX_FMT_NONE) {
            av_log(s, AV_LOG_ERROR, "No such pixel format: %s.\n",
                   s1->pixel_format);
            ret = AVERROR(EINVAL);
            goto fail;
        }
        if ((ret = av_parse_video_rate(&framerate, s1->framerate)) < 0) {
            av_log(s, AV_LOG_ERROR, "Could not parse framerate: %s.\n",
                   s1->framerate);
            goto fail;
        }
#if FF_API_FORMAT_PARAMETERS
        /* Deprecated overrides take precedence over the parsed options. */
        if (ap->width > 0)
            width = ap->width;
        if (ap->height > 0)
            height = ap->height;
        if (ap->pix_fmt)
            pix_fmt = ap->pix_fmt;
        if (ap->time_base.num)
            framerate = (AVRational){ap->time_base.den, ap->time_base.num};
#endif
        av_set_pts_info(st, 64, framerate.den, framerate.num);
        st->codec->width  = width;
        st->codec->height = height;
        st->codec->pix_fmt = pix_fmt;
        /* fall into fail with ret == 0 on success: the option strings are
         * freed on both the success and the error path. */
fail:
        av_freep(&s1->video_size);
        av_freep(&s1->pixel_format);
        av_freep(&s1->framerate);
        return ret;
        }
    default:
        return -1;
    }
    return 0;
}
/**
 * Initialize the x11 grab device demuxer (public device demuxer API).
 *
 * Parses the grab offset ("+x,y") and the optional "nomouse" flag out of
 * s1->filename, opens the X display, probes for the MIT-SHM extension and
 * allocates the XImage used for the subsequent frame grabs.
 *
 * @param s1 Context from avformat core
 * @param ap Parameters from avformat core
 * @return <ul>
 *          <li>AVERROR(ENOMEM) no memory left</li>
 *          <li>AVERROR(EIO) other failure case</li>
 *          <li>0 success</li>
 *         </ul>
 */
static int x11grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    struct x11_grab *x11grab = s1->priv_data;
    Display *dpy;
    AVStream *st = NULL;
    enum PixelFormat input_pixfmt;
    XImage *image = NULL;
    int x_off = 0;
    int y_off = 0;
    int use_shm;
    char *param, *offset;
    int ret = 0;
    AVRational framerate;

    /* av_strdup() can fail; the result was previously dereferenced
     * unconditionally by strchr() below. */
    param = av_strdup(s1->filename);
    if (!param)
        return AVERROR(ENOMEM);

    offset = strchr(param, '+');
    if (offset) {
        sscanf(offset, "%d,%d", &x_off, &y_off);
        x11grab->nomouse= strstr(offset, "nomouse");
        *offset= 0;
    }

    if ((ret = av_parse_video_size(&x11grab->width, &x11grab->height, x11grab->video_size)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Couldn't parse video size.\n");
        goto out;
    }
    if ((ret = av_parse_video_rate(&framerate, x11grab->framerate)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate: %s.\n", x11grab->framerate);
        goto out;
    }
#if FF_API_FORMAT_PARAMETERS
    /* Legacy AVFormatParameters override the private options. */
    if (ap->width > 0)
        x11grab->width = ap->width;
    if (ap->height > 0)
        x11grab->height = ap->height;
    if (ap->time_base.num)
        framerate = (AVRational){ap->time_base.den, ap->time_base.num};
#endif
    av_log(s1, AV_LOG_INFO, "device: %s -> display: %s x: %d y: %d width: %d height: %d\n",
           s1->filename, param, x_off, y_off, x11grab->width, x11grab->height);

    dpy = XOpenDisplay(param);
    if(!dpy) {
        av_log(s1, AV_LOG_ERROR, "Could not open X display.\n");
        ret = AVERROR(EIO);
        goto out;
    }

    /* NOTE(review): dpy is not closed on the error paths below; it is only
     * stored in x11grab->dpy on success. Presumably the close handler frees
     * it then — confirm and consider XCloseDisplay() on failure. */
    st = av_new_stream(s1, 0);
    if (!st) {
        ret = AVERROR(ENOMEM);
        goto out;
    }
    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    use_shm = XShmQueryExtension(dpy);
    av_log(s1, AV_LOG_INFO, "shared memory extension %s found\n", use_shm ? "" : "not");

    if(use_shm) {
        int scr = XDefaultScreen(dpy);
        image = XShmCreateImage(dpy,
                                DefaultVisual(dpy, scr),
                                DefaultDepth(dpy, scr),
                                ZPixmap,
                                NULL,
                                &x11grab->shminfo,
                                x11grab->width, x11grab->height);
        if (!image) {
            av_log(s1, AV_LOG_ERROR, "Could not create XShm image.\n");
            ret = AVERROR(EIO);
            goto out;
        }
        x11grab->shminfo.shmid = shmget(IPC_PRIVATE,
                                        image->bytes_per_line * image->height,
                                        IPC_CREAT|0777);
        if (x11grab->shminfo.shmid == -1) {
            av_log(s1, AV_LOG_ERROR, "Fatal: Can't get shared memory!\n");
            ret = AVERROR(ENOMEM);
            goto out;
        }
        x11grab->shminfo.shmaddr = image->data = shmat(x11grab->shminfo.shmid, 0, 0);
        x11grab->shminfo.readOnly = False;

        if (!XShmAttach(dpy, &x11grab->shminfo)) {
            av_log(s1, AV_LOG_ERROR, "Fatal: Failed to attach shared memory!\n");
            /* needs some better error subroutine :) */
            ret = AVERROR(EIO);
            goto out;
        }
    } else {
        image = XGetImage(dpy, RootWindow(dpy, DefaultScreen(dpy)),
                          x_off,y_off,
                          x11grab->width, x11grab->height,
                          AllPlanes, ZPixmap);
        if (!image) {
            av_log(s1, AV_LOG_ERROR, "Could not fetch image from X server.\n");
            ret = AVERROR(EIO);
            goto out;
        }
    }

    /* Map the X visual's color layout onto a lavc pixel format. */
    switch (image->bits_per_pixel) {
    case 8:
        av_log (s1, AV_LOG_DEBUG, "8 bit palette\n");
        input_pixfmt = PIX_FMT_PAL8;
        break;
    case 16:
        if ( image->red_mask == 0xf800 &&
             image->green_mask == 0x07e0 &&
             image->blue_mask == 0x001f ) {
            av_log (s1, AV_LOG_DEBUG, "16 bit RGB565\n");
            input_pixfmt = PIX_FMT_RGB565;
        } else if (image->red_mask == 0x7c00 &&
                   image->green_mask == 0x03e0 &&
                   image->blue_mask == 0x001f ) {
            av_log(s1, AV_LOG_DEBUG, "16 bit RGB555\n");
            input_pixfmt = PIX_FMT_RGB555;
        } else {
            av_log(s1, AV_LOG_ERROR, "RGB ordering at image depth %i not supported ... aborting\n", image->bits_per_pixel);
            av_log(s1, AV_LOG_ERROR, "color masks: r 0x%.6lx g 0x%.6lx b 0x%.6lx\n",
                   image->red_mask, image->green_mask, image->blue_mask);
            ret = AVERROR(EIO);
            goto out;
        }
        break;
    case 24:
        if ( image->red_mask == 0xff0000 &&
             image->green_mask == 0x00ff00 &&
             image->blue_mask == 0x0000ff ) {
            input_pixfmt = PIX_FMT_BGR24;
        } else if ( image->red_mask == 0x0000ff &&
                    image->green_mask == 0x00ff00 &&
                    image->blue_mask == 0xff0000 ) {
            input_pixfmt = PIX_FMT_RGB24;
        } else {
            av_log(s1, AV_LOG_ERROR,"rgb ordering at image depth %i not supported ... aborting\n", image->bits_per_pixel);
            av_log(s1, AV_LOG_ERROR, "color masks: r 0x%.6lx g 0x%.6lx b 0x%.6lx\n",
                   image->red_mask, image->green_mask, image->blue_mask);
            ret = AVERROR(EIO);
            goto out;
        }
        break;
    case 32:
        /* Byte order (ARGB vs ABGR within a word) is not distinguished here;
         * endianness itself is handled by avcodec. */
        input_pixfmt = PIX_FMT_RGB32;
        break;
    default:
        av_log(s1, AV_LOG_ERROR, "image depth %i not supported ... aborting\n", image->bits_per_pixel);
        ret = AVERROR(EINVAL);
        goto out;
    }

    x11grab->frame_size = x11grab->width * x11grab->height * image->bits_per_pixel/8;
    x11grab->dpy = dpy;
    x11grab->time_base = (AVRational){framerate.den, framerate.num};
    x11grab->time_frame = av_gettime() / av_q2d(x11grab->time_base);
    x11grab->x_off = x_off;
    x11grab->y_off = y_off;
    x11grab->image = image;
    x11grab->use_shm = use_shm;

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id = CODEC_ID_RAWVIDEO;
    st->codec->width = x11grab->width;
    st->codec->height = x11grab->height;
    st->codec->pix_fmt = input_pixfmt;
    st->codec->time_base = x11grab->time_base;
    st->codec->bit_rate = x11grab->frame_size * 1/av_q2d(x11grab->time_base) * 8;

out:
    av_free(param); /* was leaked on every path before */
    return ret;
}
/**
 * Initialize the Android camera/audio capture demuxer.
 *
 * Handles device/option listing modes, parses the requested frame rate,
 * opens the native demuxer and registers the video/audio streams.
 *
 * @param avctx demuxer context from avformat core
 * @return 0 on success, AVERROR_EXIT after a listing request,
 *         a negative AVERROR code on failure
 */
static int android_read_header(AVFormatContext *avctx)
{
    int ret = AVERROR(EIO);
    int android_pix_fmt, android_sample_fmt;
    struct android_camera_ctx *ctx;

    ctx = avctx->priv_data;
    debug = ctx->debug;

    if (!ctx->list_devices && !parse_device_name(avctx)) {
        av_log(avctx, AV_LOG_ERROR, "Malformed android_camera input string.\n");
        return ret;
    }

    ctx->video_codec_id = avctx->video_codec_id ? avctx->video_codec_id
                                                : AV_CODEC_ID_RAWVIDEO;

    /* A forced pixel format is only meaningful with rawvideo. */
    if (ctx->pixel_format != AV_PIX_FMT_NONE) {
        if (ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO) {
            av_log(avctx, AV_LOG_ERROR, "Pixel format may only be set when "
                              "video codec is not set or set to rawvideo\n");
            ret = AVERROR(EINVAL);
            return ret;
        }
    }

    if (ctx->framerate) {
        ret = av_parse_video_rate(&ctx->requested_framerate, ctx->framerate);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Could not parse framerate '%s'.\n", ctx->framerate);
            return ret;
        }
    }

    /* Device listing mode: enumerate and bail out. */
    if (ctx->list_devices) {
        av_log(avctx, AV_LOG_INFO, "Android camera devices (some may be both video and audio devices)\n");
        android_cycle_devices(avctx, VideoDevice);
        av_log(avctx, AV_LOG_INFO, "Android audio devices\n");
        android_cycle_devices(avctx, AudioDevice);
        ret = AVERROR_EXIT;
        return ret;
    }

    /* Option listing mode: enumerate per-device options and bail out. */
    if (ctx->list_options) {
        if (ctx->device_name[VideoDevice]) {
            if ((ret = android_list_device_options(avctx, VideoDevice))) {
                return ret;
            }
        }
        if (ctx->device_name[AudioDevice]) {
            /* NOTE(review): on failure the same call is repeated with
             * identical arguments; presumably intended as a retry against
             * combined video+audio sources — confirm the intent. */
            if (android_list_device_options(avctx, AudioDevice)) {
                /* show audio options from combined video+audio sources as fallback */
                if ((ret = android_list_device_options(avctx, AudioDevice))) {
                    return ret;
                }
            }
        }
        ret = AVERROR_EXIT;
        return ret;
    }

    /* Open the Android camera device and the audio capture device. */
    int iDevice = parse_device_id(ctx->device_name[VideoDevice]);
    if (iDevice >= 0) {
        android_setDemuxerCallback(android_grab_buffer);
        /* Cast av_q2d() to int: passing a double for %d is undefined
         * behavior in printf-style varargs. */
        DEBUG("android_read_header android_openDemuxer:oes=%d,dev=%d,w=%d,h=%d,pixfmt=%d,fps=%d,ch=%d,bits=%d,rate=%d",
              ctx->oes_texture, iDevice, ctx->requested_width,
              ctx->requested_height, ctx->pixel_format,
              (int)av_q2d(ctx->requested_framerate),
              ctx->channels, 16, ctx->sample_rate);
        android_pix_fmt = android_pixfmt2av(ctx->pixel_format);
        android_sample_fmt = 16;
        int result = android_openDemuxer(ctx->oes_texture, iDevice,
                                         ctx->requested_width,
                                         ctx->requested_height,
                                         android_pix_fmt,
                                         av_q2d(ctx->requested_framerate),
                                         ctx->channels, android_sample_fmt,
                                         ctx->sample_rate);
        if (result) {
            av_log(avctx, AV_LOG_ERROR, "android_openDemuxer return %d\n", result);
            /* ret may be 0 here from a successful av_parse_video_rate();
             * report an explicit failure instead of success. */
            return AVERROR(EIO);
        }
        /* NOTE(review): this condition is true for every value >= -1;
         * confirm -1 really is a valid "no OES texture" sentinel. */
        if (ctx->oes_texture >= -1) {
            ret = add_device(avctx, VideoDevice);
            if (ret < 0) {
                av_log(avctx, AV_LOG_ERROR, "add_device VideoDevice return %d\n", ret);
                return ret;
            }
        }
        if (ctx->channels > 0) {
            ret = add_device(avctx, AudioDevice);
            if (ret < 0) {
                av_log(avctx, AV_LOG_ERROR, "add_device AudioDevice return %d\n", ret);
                return ret;
            }
        }
        if (pthread_mutex_init(&ctx->mutex, NULL)) {
            av_log(avctx, AV_LOG_ERROR, "pthread_mutex_init non-zero");
            /* ret could be >= 0 here (from add_device); don't return it. */
            return AVERROR(EIO);
        }
        if (pthread_cond_init(&ctx->cond, NULL)) {
            av_log(avctx, AV_LOG_ERROR, "pthread_cond_init non-zero");
            pthread_mutex_destroy(&ctx->mutex); /* don't leak the mutex */
            return AVERROR(EIO);
        }
        /* Only one Android capture device can be open at a time. */
        _avctx = avctx;
        ctx->bufsize = 0;
        ret = 0;
    } else {
        av_log(avctx, AV_LOG_ERROR, "android_read_header videoDevice = %s\n",
               ctx->device_name[VideoDevice]);
        /* ret could be 0 here if a framerate was parsed above; make the
         * bad-device case an explicit error. */
        ret = AVERROR(EIO);
    }

    DEBUG("android_read_header return %d", ret);
    return ret;
}