int main(int argc, char **argv) { struct sigaction sigact = { { 0 } }; int ret = 0; config_filename = av_strdup("/etc/ffserver.conf"); av_lfg_init(&random_state, av_get_random_seed()); if ((ret = parse_ffconfig(config_filename)) < 0) { fprintf(stderr, "Error reading configuration file '%s': %s\n", config_filename, av_err2str(ret)); exit(1); } av_freep(&config_filename); // set logfile logfile = stdout; build_file_streams(); /* signal init */ signal(SIGPIPE, SIG_IGN); if (start_event_loop() < 0) { // http_log("Could not start server\n"); printf("could not start server\n"); exit(1); } return 0; }
int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen, void *log_ctx) { char *tail, color_string2[128]; const ColorEntry *entry; int len, hex_offset = 0; if (color_string[0] == '#') { hex_offset = 1; } else if (!strncmp(color_string, "0x", 2)) hex_offset = 2; if (slen < 0) slen = strlen(color_string); av_strlcpy(color_string2, color_string + hex_offset, FFMIN(slen-hex_offset+1, sizeof(color_string2))); if ((tail = strchr(color_string2, ALPHA_SEP))) *tail++ = 0; len = strlen(color_string2); rgba_color[3] = 255; if (!strcasecmp(color_string2, "random") || !strcasecmp(color_string2, "bikeshed")) { int rgba = av_get_random_seed(); rgba_color[0] = rgba >> 24; rgba_color[1] = rgba >> 16; rgba_color[2] = rgba >> 8; rgba_color[3] = rgba; } else if (hex_offset ||
int av_parse_color(uint8_t *rgba_color, const char *color_string, void *log_ctx) { if (!strcasecmp(color_string, "random") || !strcasecmp(color_string, "bikeshed")) { int rgba = av_get_random_seed(); rgba_color[0] = rgba >> 24; rgba_color[1] = rgba >> 16; rgba_color[2] = rgba >> 8; rgba_color[3] = rgba; } else
/*
 * Fill ssid with a playback session identifier: a fixed UUID-style
 * prefix followed by 12 zero-padded decimal digits drawn from an LFG
 * seeded with av_get_random_seed().
 *
 * @param ssid  destination buffer (may be NULL)
 * @param size  capacity of ssid; the id is truncated to fit
 * @return 0 on success, -1 if ssid is NULL
 *
 * Fix: validate ssid before doing the PRNG/formatting work instead of
 * after (the original generated the id and then discarded it on NULL).
 */
int generate_playback_session_id(char * ssid, int size)
{
    AVLFG rnd;
    const char *ex_id = "314F1B49-AA3C-4715-950D-";
    unsigned int session_id = 0;
    char session_name[SESSLEN] = "";

    if (ssid == NULL)
        return -1;

    av_lfg_init(&rnd, av_get_random_seed());
    session_id = av_lfg_get(&rnd);
    snprintf(session_name, SESSLEN, "%s%012u", ex_id, session_id);
    snprintf(ssid, size, "%s", session_name);
    return 0;
}
/*
 * Fill ssid with a segment session identifier: a fixed UUID-style
 * prefix followed by 12 zero-padded decimal digits drawn from an LFG
 * seeded with av_get_random_seed().
 *
 * @param ssid  destination buffer (may be NULL)
 * @param size  capacity of ssid; the id is truncated to fit
 * @return 0 on success, -1 if ssid is NULL
 *
 * Fix: validate ssid before doing the PRNG/formatting work instead of
 * after (the original generated the id and then discarded it on NULL).
 */
int generate_segment_session_id(char * ssid, int size)
{
    AVLFG rnd;
    const char *ex_id = "E4499E08-D4C5-4DB0-A21C-";
    unsigned int session_id = 0;
    char session_name[SESSLEN] = "";

    if (ssid == NULL)
        return -1;

    av_lfg_init(&rnd, av_get_random_seed());
    session_id = av_lfg_get(&rnd);
    snprintf(session_name, SESSLEN, "%s%012u", ex_id, session_id);
    snprintf(ssid, size, "%s", session_name);
    return 0;
}
/*
 * Self-test for av_get_random_seed(): draw N seeds and verify they are
 * pairwise distinct, retrying up to 3 times.  Prints "seeds OK" and
 * returns 0 on the first collision-free round; after three failed
 * rounds prints the index and value of the last duplicate seen and
 * returns 1.  (Structured rewrite of the original goto-based loop.)
 */
int main(void)
{
    uint32_t seeds[N];
    int dup_idx = 0;

    for (int attempt = 0; attempt < 3; attempt++) {
        int collided = 0;

        for (int i = 0; i < N && !collided; i++) {
            seeds[i] = av_get_random_seed();
            for (int j = 0; j < i; j++) {
                if (seeds[j] == seeds[i]) {
                    dup_idx = j;
                    collided = 1;
                    break;
                }
            }
        }

        if (!collided) {
            printf("seeds OK\n");
            return 0;
        }
    }

    printf("FAIL at %d with %X\n", dup_idx, seeds[dup_idx]);
    return 1;
}
/*
 * Open an RTP output on top of a UDP socket.
 *
 * Allocates the RTP context, opens the underlying UDP handle described
 * by udp_opts and picks a random SSRC for the session.
 *
 * @param p_handle  receives the new context on success
 * @param udp_opts  destination/options for the underlying UDP output
 * @return 0 on success, -1 on allocation or UDP-open failure
 *
 * Fix: the original leaked p_rtp when udp_open() failed.
 */
static int rtp_open( hnd_t *p_handle, obe_udp_opts_t *udp_opts )
{
    obe_rtp_ctx *p_rtp = calloc( 1, sizeof(*p_rtp) );
    if( !p_rtp )
    {
        fprintf( stderr, "[rtp] malloc failed" );
        return -1;
    }

    if( udp_open( &p_rtp->udp_handle, udp_opts ) < 0 )
    {
        fprintf( stderr, "[rtp] Could not create udp output" );
        free( p_rtp );   /* don't leak the context on failure */
        return -1;
    }

    /* Random SSRC per RTP session (RFC 3550 requires it be random). */
    p_rtp->ssrc = av_get_random_seed();

    *p_handle = p_rtp;

    return 0;
}
/*
 * drawtext: (re)configure per-link state when the input link's format
 * or dimensions change.
 *
 * Initializes the draw context for the link's pixel format, resolves
 * the four configured colors, seeds the expression variables (frame
 * dimensions, SAR/DAR, chroma subsampling factors; x/y/t start as NAN),
 * seeds the PRNG used by expressions, and (re)parses the x/y/a
 * expressions.
 *
 * @return 0 on success, AVERROR(EINVAL) if any expression fails to parse
 *
 * Fix: the original freed x_pexpr/y_pexpr before re-parsing but not
 * a_pexpr, leaking the old alpha expression on every reconfiguration.
 */
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    DrawTextContext *s = ctx->priv;
    int ret;

    ff_draw_init(&s->dc, inlink->format, FF_DRAW_PROCESS_ALPHA);
    ff_draw_color(&s->dc, &s->fontcolor,   s->fontcolor.rgba);
    ff_draw_color(&s->dc, &s->shadowcolor, s->shadowcolor.rgba);
    ff_draw_color(&s->dc, &s->bordercolor, s->bordercolor.rgba);
    ff_draw_color(&s->dc, &s->boxcolor,    s->boxcolor.rgba);

    s->var_values[VAR_w]    = s->var_values[VAR_W]    = s->var_values[VAR_MAIN_W] = inlink->w;
    s->var_values[VAR_h]    = s->var_values[VAR_H]    = s->var_values[VAR_MAIN_H] = inlink->h;
    s->var_values[VAR_SAR]  = inlink->sample_aspect_ratio.num ? av_q2d(inlink->sample_aspect_ratio) : 1;
    s->var_values[VAR_DAR]  = (double)inlink->w / inlink->h * s->var_values[VAR_SAR];
    s->var_values[VAR_HSUB] = 1 << s->dc.hsub_max;
    s->var_values[VAR_VSUB] = 1 << s->dc.vsub_max;
    s->var_values[VAR_X]    = NAN;
    s->var_values[VAR_Y]    = NAN;
    s->var_values[VAR_T]    = NAN;

    av_lfg_init(&s->prng, av_get_random_seed());

    /* Free ALL previously parsed expressions before re-parsing. */
    av_expr_free(s->x_pexpr);
    av_expr_free(s->y_pexpr);
    av_expr_free(s->a_pexpr);
    s->x_pexpr = s->y_pexpr = s->a_pexpr = NULL;

    if ((ret = av_expr_parse(&s->x_pexpr, s->x_expr, var_names,
                             NULL, NULL, fun2_names, fun2, 0, ctx)) < 0 ||
        (ret = av_expr_parse(&s->y_pexpr, s->y_expr, var_names,
                             NULL, NULL, fun2_names, fun2, 0, ctx)) < 0 ||
        (ret = av_expr_parse(&s->a_pexpr, s->a_expr, var_names,
                             NULL, NULL, fun2_names, fun2, 0, ctx)) < 0)
        return AVERROR(EINVAL);

    return 0;
}
/*
 * RTP muxer header setup (single-stream only).
 *
 * Validates that exactly one supported stream is present, resolves the
 * RTP payload type (re-validating static types below RTP_PT_PRIVATE,
 * letting the private option override otherwise), and randomizes the
 * session state per RFC 3550: base timestamp and, unless preset, the
 * SSRC come from av_get_random_seed(); the starting sequence number is
 * random but confined to 0..0x0fff so SRTP doesn't wrap immediately
 * (0 when AVFMT_FLAG_BITEXACT).  Sizes the packet buffer from
 * s1->packet_size clamped to the protocol's max_packet_size (must
 * exceed the 12-byte RTP header), then applies per-codec setup:
 * 90 kHz vs sample-rate clocks, MPEG-TS payload alignment, H.264/HEVC
 * hvcC NAL length size, G.722's historical 8 kHz clock (RFC 3551),
 * Opus' fixed 48 kHz clock, iLBC block-size checks, and AMR TOC-size
 * validation.  Returns 0, or a negative AVERROR (buffer freed via the
 * fail: path).  NOTE(review): the unsupported-codec branch returns -1
 * rather than an AVERROR code — inconsistent with the rest; confirm
 * against upstream before changing.
 */
static int rtp_write_header(AVFormatContext *s1) { RTPMuxContext *s = s1->priv_data; int n, ret = AVERROR(EINVAL); AVStream *st; if (s1->nb_streams != 1) { av_log(s1, AV_LOG_ERROR, "Only one stream supported in the RTP muxer\n"); return AVERROR(EINVAL); } st = s1->streams[0]; if (!is_supported(st->codec->codec_id)) { av_log(s1, AV_LOG_ERROR, "Unsupported codec %s\n", avcodec_get_name(st->codec->codec_id)); return -1; } if (s->payload_type < 0) { /* Re-validate non-dynamic payload types */ if (st->id < RTP_PT_PRIVATE) st->id = ff_rtp_get_payload_type(s1, st->codec, -1); s->payload_type = st->id; } else { /* private option takes priority */ st->id = s->payload_type; } s->base_timestamp = av_get_random_seed(); s->timestamp = s->base_timestamp; s->cur_timestamp = 0; if (!s->ssrc) s->ssrc = av_get_random_seed(); s->first_packet = 1; s->first_rtcp_ntp_time = ff_ntp_time(); if (s1->start_time_realtime != 0 && s1->start_time_realtime != AV_NOPTS_VALUE) /* Round the NTP time to whole milliseconds. */ s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 + NTP_OFFSET_US; // Pick a random sequence start number, but in the lower end of the // available range, so that any wraparound doesn't happen immediately. // (Immediate wraparound would be an issue for SRTP.) 
/* Sequence init, buffer sizing, clock selection and the per-codec switch: */
if (s->seq < 0) { if (s1->flags & AVFMT_FLAG_BITEXACT) { s->seq = 0; } else s->seq = av_get_random_seed() & 0x0fff; } else s->seq &= 0xffff; // Use the given parameter, wrapped to the right interval if (s1->packet_size) { if (s1->pb->max_packet_size) s1->packet_size = FFMIN(s1->packet_size, s1->pb->max_packet_size); } else s1->packet_size = s1->pb->max_packet_size; if (s1->packet_size <= 12) { av_log(s1, AV_LOG_ERROR, "Max packet size %d too low\n", s1->packet_size); return AVERROR(EIO); } s->buf = av_malloc(s1->packet_size); if (!s->buf) { return AVERROR(ENOMEM); } s->max_payload_size = s1->packet_size - 12; if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { avpriv_set_pts_info(st, 32, 1, st->codec->sample_rate); } else { avpriv_set_pts_info(st, 32, 1, 90000); } s->buf_ptr = s->buf; switch(st->codec->codec_id) { case AV_CODEC_ID_MP2: case AV_CODEC_ID_MP3: s->buf_ptr = s->buf + 4; avpriv_set_pts_info(st, 32, 1, 90000); break; case AV_CODEC_ID_MPEG1VIDEO: case AV_CODEC_ID_MPEG2VIDEO: break; case AV_CODEC_ID_MPEG2TS: n = s->max_payload_size / TS_PACKET_SIZE; if (n < 1) n = 1; s->max_payload_size = n * TS_PACKET_SIZE; break; case AV_CODEC_ID_H261: if (s1->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) { av_log(s, AV_LOG_ERROR, "Packetizing H261 is experimental and produces incorrect " "packetization for cases where GOBs don't fit into packets " "(even though most receivers may handle it just fine). " "Please set -f_strict experimental in order to enable it.\n"); ret = AVERROR_EXPERIMENTAL; goto fail; } break; case AV_CODEC_ID_H264: /* check for H.264 MP4 syntax */ if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) { s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1; } break; case AV_CODEC_ID_HEVC: /* Only check for the standardized hvcC version of extradata, keeping * things simple and similar to the avcC/H264 case above, instead * of trying to handle the pre-standardization versions (as in * libavcodec/hevc.c). 
*/ if (st->codec->extradata_size > 21 && st->codec->extradata[0] == 1) { s->nal_length_size = (st->codec->extradata[21] & 0x03) + 1; } break; case AV_CODEC_ID_VORBIS: case AV_CODEC_ID_THEORA: s->max_frames_per_packet = 15; break; case AV_CODEC_ID_ADPCM_G722: /* Due to a historical error, the clock rate for G722 in RTP is * 8000, even if the sample rate is 16000. See RFC 3551. */ avpriv_set_pts_info(st, 32, 1, 8000); break; case AV_CODEC_ID_OPUS: if (st->codec->channels > 2) { av_log(s1, AV_LOG_ERROR, "Multistream opus not supported in RTP\n"); goto fail; } /* The opus RTP RFC says that all opus streams should use 48000 Hz * as clock rate, since all opus sample rates can be expressed in * this clock rate, and sample rate changes on the fly are supported. */ avpriv_set_pts_info(st, 32, 1, 48000); break; case AV_CODEC_ID_ILBC: if (st->codec->block_align != 38 && st->codec->block_align != 50) { av_log(s1, AV_LOG_ERROR, "Incorrect iLBC block size specified\n"); goto fail; } s->max_frames_per_packet = s->max_payload_size / st->codec->block_align; break; case AV_CODEC_ID_AMR_NB: case AV_CODEC_ID_AMR_WB: s->max_frames_per_packet = 50; if (st->codec->codec_id == AV_CODEC_ID_AMR_NB) n = 31; else n = 61; /* max_header_toc_size + the largest AMR payload must fit */ if (1 + s->max_frames_per_packet + n > s->max_payload_size) { av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n"); goto fail; } if (st->codec->channels != 1) { av_log(s1, AV_LOG_ERROR, "Only mono is supported\n"); goto fail; } break; case AV_CODEC_ID_AAC: s->max_frames_per_packet = 50; break; default: break; } return 0; fail: av_freep(&s->buf); return ret; }
/*
 * Handle an incoming RTSP SETUP request (server/listen mode, record).
 *
 * Reads and validates the request: every offered transport must be
 * mode=record over UDP or TCP (only the first of multiple transports
 * is used, with a warning).  Matches the request's control URL against
 * the known rtsp_streams; unknown tracks get AVERROR_STREAM_NOT_FOUND.
 * For TCP, records the interleaved channel range; for UDP, scans local
 * port pairs upward from rt->rtp_port_min opening an "rtp://" handle.
 * On transport-open failure an RTSP 461 (Transport) reply is sent.
 * If no session id exists yet, one of at least 8 digits (RFC 2326) is
 * built by appending av_get_random_seed() values.  Replies 200 OK with
 * the negotiated Transport and Session headers and leaves the state
 * machine in RTSP_STATE_PAUSED.
 *
 * NOTE(review): the UDP port-scan condition
 * `while (ret || localport > rt->rtp_port_max)` looks inverted — if
 * every open fails it never terminates, and the subsequent
 * `localport > rt->rtp_port_max` error branch seems unreachable on the
 * success path.  Expected something like
 * `while (ret && localport <= rt->rtp_port_max)`; verify against
 * upstream before touching.
 */
static int rtsp_read_setup(AVFormatContext *s, char* host, char *controlurl) { RTSPState *rt = s->priv_data; RTSPMessageHeader request = { 0 }; int ret = 0; char url[1024]; RTSPStream *rtsp_st; char responseheaders[1024]; int localport = -1; int transportidx = 0; int streamid = 0; ret = rtsp_read_request(s, &request, "SETUP"); if (ret) return ret; rt->seq++; if (!request.nb_transports) { av_log(s, AV_LOG_ERROR, "No transport defined in SETUP\n"); return AVERROR_INVALIDDATA; } for (transportidx = 0; transportidx < request.nb_transports; transportidx++) { if (!request.transports[transportidx].mode_record || (request.transports[transportidx].lower_transport != RTSP_LOWER_TRANSPORT_UDP && request.transports[transportidx].lower_transport != RTSP_LOWER_TRANSPORT_TCP)) { av_log(s, AV_LOG_ERROR, "mode=record/receive not set or transport" " protocol not supported (yet)\n"); return AVERROR_INVALIDDATA; } } if (request.nb_transports > 1) av_log(s, AV_LOG_WARNING, "More than one transport not supported, " "using first of all\n"); for (streamid = 0; streamid < rt->nb_rtsp_streams; streamid++) { if (!strcmp(rt->rtsp_streams[streamid]->control_url, controlurl)) break; } if (streamid == rt->nb_rtsp_streams) { av_log(s, AV_LOG_ERROR, "Unable to find requested track\n"); return AVERROR_STREAM_NOT_FOUND; } rtsp_st = rt->rtsp_streams[streamid]; localport = rt->rtp_port_min; if (request.transports[0].lower_transport == RTSP_LOWER_TRANSPORT_TCP) { rt->lower_transport = RTSP_LOWER_TRANSPORT_TCP; if ((ret = ff_rtsp_open_transport_ctx(s, rtsp_st))) { rtsp_send_reply(s, RTSP_STATUS_TRANSPORT, NULL, request.seq); return ret; } rtsp_st->interleaved_min = request.transports[0].interleaved_min; rtsp_st->interleaved_max = request.transports[0].interleaved_max; snprintf(responseheaders, sizeof(responseheaders), "Transport: " "RTP/AVP/TCP;unicast;mode=receive;interleaved=%d-%d" "\r\n", request.transports[0].interleaved_min, request.transports[0].interleaved_max); } else { do { ff_url_join(url, 
sizeof(url), "rtp", NULL, host, localport, NULL); av_dlog(s, "Opening: %s", url); ret = ffurl_open(&rtsp_st->rtp_handle, url, AVIO_FLAG_READ_WRITE, &s->interrupt_callback, NULL); if (ret) localport += 2; } while (ret || localport > rt->rtp_port_max); if (localport > rt->rtp_port_max) { rtsp_send_reply(s, RTSP_STATUS_TRANSPORT, NULL, request.seq); return ret; } av_dlog(s, "Listening on: %d", ff_rtp_get_local_rtp_port(rtsp_st->rtp_handle)); if ((ret = ff_rtsp_open_transport_ctx(s, rtsp_st))) { rtsp_send_reply(s, RTSP_STATUS_TRANSPORT, NULL, request.seq); return ret; } localport = ff_rtp_get_local_rtp_port(rtsp_st->rtp_handle); snprintf(responseheaders, sizeof(responseheaders), "Transport: " "RTP/AVP/UDP;unicast;mode=receive;source=%s;" "client_port=%d-%d;server_port=%d-%d\r\n", host, request.transports[0].client_port_min, request.transports[0].client_port_max, localport, localport + 1); } /* Establish sessionid if not previously set */ /* Put this in a function? */ /* RFC 2326: session id must be at least 8 digits */ while (strlen(rt->session_id) < 8) av_strlcatf(rt->session_id, 512, "%u", av_get_random_seed()); av_strlcatf(responseheaders, sizeof(responseheaders), "Session: %s\r\n", rt->session_id); /* Send Reply */ rtsp_send_reply(s, RTSP_STATUS_OK, responseheaders, request.seq); rt->state = RTSP_STATE_PAUSED; return 0; }
/*
 * RTP muxer header setup — legacy variant (CODEC_ID_* constants,
 * url_fget_max_packet_size(), av_set_pts_info(), -1 error returns).
 *
 * Accepts exactly one supported stream, resolves the payload type
 * (dynamic types get RTP_PT_PRIVATE, +1 for audio), randomizes the
 * base timestamp and SSRC via av_get_random_seed(), records the first
 * RTCP NTP time, and sizes the payload buffer from the protocol's max
 * packet size minus the 12-byte RTP header.  When max_delay is set,
 * derives max_frames_per_packet from the audio frame size or the video
 * time base.  The switch then applies per-codec quirks: MP2/MP3 leave
 * 4 bytes of payload header, MPEG-TS aligns the payload to whole TS
 * packets, H.264 reads the NAL length size from avcC extradata, and
 * AMR validates TOC+payload fit and mono-only input.
 *
 * NOTE(review): the AMR case has no break and falls through into the
 * AAC case and then default (which sets the audio clock and buf_ptr).
 * That fallthrough appears load-bearing here — the default branch is
 * what initializes s->buf_ptr for those codecs — so do not "fix" it
 * without checking upstream history.
 */
static int rtp_write_header(AVFormatContext *s1) { RTPMuxContext *s = s1->priv_data; int max_packet_size, n; AVStream *st; if (s1->nb_streams != 1) return -1; st = s1->streams[0]; if (!is_supported(st->codec->codec_id)) { av_log(s1, AV_LOG_ERROR, "Unsupported codec %x\n", st->codec->codec_id); return -1; } s->payload_type = ff_rtp_get_payload_type(st->codec); if (s->payload_type < 0) s->payload_type = RTP_PT_PRIVATE + (st->codec->codec_type == AVMEDIA_TYPE_AUDIO); s->base_timestamp = av_get_random_seed(); s->timestamp = s->base_timestamp; s->cur_timestamp = 0; s->ssrc = av_get_random_seed(); s->first_packet = 1; s->first_rtcp_ntp_time = ff_ntp_time(); if (s1->start_time_realtime) /* Round the NTP time to whole milliseconds. */ s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 + NTP_OFFSET_US; max_packet_size = url_fget_max_packet_size(s1->pb); if (max_packet_size <= 12) return AVERROR(EIO); s->buf = av_malloc(max_packet_size); if (s->buf == NULL) { return AVERROR(ENOMEM); } s->max_payload_size = max_packet_size - 12; s->max_frames_per_packet = 0; if (s1->max_delay) { if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { if (st->codec->frame_size == 0) { av_log(s1, AV_LOG_ERROR, "Cannot respect max delay: frame size = 0\n"); } else { s->max_frames_per_packet = av_rescale_rnd(s1->max_delay, st->codec->sample_rate, AV_TIME_BASE * st->codec->frame_size, AV_ROUND_DOWN); } } if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { /* FIXME: We should round down here... 
*/ s->max_frames_per_packet = av_rescale_q(s1->max_delay, (AVRational){1, 1000000}, st->codec->time_base); } } av_set_pts_info(st, 32, 1, 90000); switch(st->codec->codec_id) { case CODEC_ID_MP2: case CODEC_ID_MP3: s->buf_ptr = s->buf + 4; break; case CODEC_ID_MPEG1VIDEO: case CODEC_ID_MPEG2VIDEO: break; case CODEC_ID_MPEG2TS: n = s->max_payload_size / TS_PACKET_SIZE; if (n < 1) n = 1; s->max_payload_size = n * TS_PACKET_SIZE; s->buf_ptr = s->buf; break; case CODEC_ID_H264: /* check for H.264 MP4 syntax */ if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) { s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1; } break; case CODEC_ID_AMR_NB: case CODEC_ID_AMR_WB: if (!s->max_frames_per_packet) s->max_frames_per_packet = 12; if (st->codec->codec_id == CODEC_ID_AMR_NB) n = 31; else n = 61; /* max_header_toc_size + the largest AMR payload must fit */ if (1 + s->max_frames_per_packet + n > s->max_payload_size) { av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n"); return -1; } if (st->codec->channels != 1) { av_log(s1, AV_LOG_ERROR, "Only mono is supported\n"); return -1; } case CODEC_ID_AAC: s->num_frames = 0; default: if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { av_set_pts_info(st, 32, 1, st->codec->sample_rate); } s->buf_ptr = s->buf; break; } return 0; }
/*
 * Build an RFC 2617 "Authorization: Digest ..." request header.
 *
 * Increments and formats the nonce count (nc), generates a 64-bit
 * client nonce from two av_get_random_seed() draws, then computes:
 *   HA1 = MD5(username:realm:password), re-hashed with nonce and
 *         cnonce for the "MD5-sess" algorithm;
 *   HA2 = MD5(method:uri);
 *   response = MD5(HA1:nonce[:nc:cnonce:qop]:HA2), the qop fields
 *         included only for qop="auth"/"auth-int".
 * Only algorithm ""/"MD5"/"MD5-sess" and qop ""/"auth" are supported;
 * "auth-int" and anything else return NULL.  On success returns an
 * av_malloc()ed, CRLF-terminated header string the caller must free
 * (quoted values are NOT escaped — see the in-code TODO); returns NULL
 * on unsupported parameters or allocation failure.
 */
/* Generate a digest reply, according to RFC 2617. */ static char *make_digest_auth(HTTPAuthState *state, const char *username, const char *password, const char *uri, const char *method) { DigestParams *digest = &state->digest_params; int len; uint32_t cnonce_buf[2]; char cnonce[17]; char nc[9]; int i; char A1hash[33], A2hash[33], response[33]; struct AVMD5 *md5ctx; uint8_t hash[16]; char *authstr; digest->nc++; snprintf(nc, sizeof(nc), "%08x", digest->nc); /* Generate a client nonce. */ for (i = 0; i < 2; i++) cnonce_buf[i] = av_get_random_seed(); ff_data_to_hex(cnonce, (const uint8_t*) cnonce_buf, sizeof(cnonce_buf), 1); cnonce[2*sizeof(cnonce_buf)] = 0; md5ctx = av_md5_alloc(); if (!md5ctx) return NULL; av_md5_init(md5ctx); update_md5_strings(md5ctx, username, ":", state->realm, ":", password, NULL); av_md5_final(md5ctx, hash); ff_data_to_hex(A1hash, hash, 16, 1); A1hash[32] = 0; if (!strcmp(digest->algorithm, "") || !strcmp(digest->algorithm, "MD5")) { } else if (!strcmp(digest->algorithm, "MD5-sess")) { av_md5_init(md5ctx); update_md5_strings(md5ctx, A1hash, ":", digest->nonce, ":", cnonce, NULL); av_md5_final(md5ctx, hash); ff_data_to_hex(A1hash, hash, 16, 1); A1hash[32] = 0; } else { /* Unsupported algorithm */ av_free(md5ctx); return NULL; } av_md5_init(md5ctx); update_md5_strings(md5ctx, method, ":", uri, NULL); av_md5_final(md5ctx, hash); ff_data_to_hex(A2hash, hash, 16, 1); A2hash[32] = 0; av_md5_init(md5ctx); update_md5_strings(md5ctx, A1hash, ":", digest->nonce, NULL); if (!strcmp(digest->qop, "auth") || !strcmp(digest->qop, "auth-int")) { update_md5_strings(md5ctx, ":", nc, ":", cnonce, ":", digest->qop, NULL); } update_md5_strings(md5ctx, ":", A2hash, NULL); av_md5_final(md5ctx, hash); ff_data_to_hex(response, hash, 16, 1); response[32] = 0; av_free(md5ctx); if (!strcmp(digest->qop, "") || !strcmp(digest->qop, "auth")) { } else if (!strcmp(digest->qop, "auth-int")) { /* qop=auth-int not supported */ return NULL; } else { /* Unsupported qop value. 
*/ return NULL; } len = strlen(username) + strlen(state->realm) + strlen(digest->nonce) + strlen(uri) + strlen(response) + strlen(digest->algorithm) + strlen(digest->opaque) + strlen(digest->qop) + strlen(cnonce) + strlen(nc) + 150; authstr = av_malloc(len); if (!authstr) return NULL; snprintf(authstr, len, "Authorization: Digest "); /* TODO: Escape the quoted strings properly. */ av_strlcatf(authstr, len, "username=\"%s\"", username); av_strlcatf(authstr, len, ", realm=\"%s\"", state->realm); av_strlcatf(authstr, len, ", nonce=\"%s\"", digest->nonce); av_strlcatf(authstr, len, ", uri=\"%s\"", uri); av_strlcatf(authstr, len, ", response=\"%s\"", response); if (digest->algorithm[0]) av_strlcatf(authstr, len, ", algorithm=%s", digest->algorithm); if (digest->opaque[0]) av_strlcatf(authstr, len, ", opaque=\"%s\"", digest->opaque); if (digest->qop[0]) { av_strlcatf(authstr, len, ", qop=\"%s\"", digest->qop); av_strlcatf(authstr, len, ", cnonce=\"%s\"", cnonce); av_strlcatf(authstr, len, ", nc=%s", nc); } av_strlcatf(authstr, len, "\r\n"); return authstr; }
/*
 * Thin compatibility wrapper forwarding to av_get_random_seed().
 * Presumably kept so existing callers of the older ff_-prefixed name
 * keep linking — TODO confirm against the library's deprecation notes.
 */
uint32_t ff_random_get_seed(void) { return av_get_random_seed(); }
/*
 * SAP announcer muxer init: open one RTP chained muxer per stream and
 * send the session's SDP in a single SAP announcement packet.
 *
 * Parses announce_port / same_port / ttl / announce_addr options from
 * the output URL; without an explicit announce address, resolves the
 * destination host and picks the standard SAP multicast group
 * (224.2.127.254 for IPv4, ff0e::2:7ffe for IPv6).  Opens an rtp://
 * output per stream on consecutive port pairs (unless same_port), then
 * a connected UDP socket to the announce address.  Builds the SAP
 * header in sap->ann: version/address-type byte, zero auth length, a
 * random 16-bit message-id hash from av_get_random_seed(), the local
 * originating address, the "application/sdp" payload type string, and
 * finally the SDP from av_sdp_create().  Fails with EIO/ENOMEM/
 * AVERROR_INVALIDDATA as appropriate; the fail: path tears everything
 * down via sap_write_close().
 *
 * NOTE(review): the "Announcement too large to send in one packet"
 * branch does `goto fail` without setting ret, which at that point is
 * 0 from the last successful call — the error would be reported as
 * success.  Likely should set ret = AVERROR(EIO); verify upstream.
 * Also: sap->ann allocation failure returns EIO rather than ENOMEM.
 */
static int sap_write_header(AVFormatContext *s) { struct SAPState *sap = s->priv_data; char host[1024], path[1024], url[1024], announce_addr[50] = ""; char *option_list; int port = 9875, base_port = 5004, i, pos = 0, same_port = 0, ttl = 255; AVFormatContext **contexts = NULL; int ret = 0; struct sockaddr_storage localaddr; socklen_t addrlen = sizeof(localaddr); int udp_fd; if (!ff_network_init()) return AVERROR(EIO); /* extract hostname and port */ av_url_split(NULL, 0, NULL, 0, host, sizeof(host), &base_port, path, sizeof(path), s->filename); if (base_port < 0) base_port = 5004; /* search for options */ option_list = strrchr(path, '?'); if (option_list) { char buf[50]; if (av_find_info_tag(buf, sizeof(buf), "announce_port", option_list)) { port = strtol(buf, NULL, 10); } if (av_find_info_tag(buf, sizeof(buf), "same_port", option_list)) { same_port = strtol(buf, NULL, 10); } if (av_find_info_tag(buf, sizeof(buf), "ttl", option_list)) { ttl = strtol(buf, NULL, 10); } if (av_find_info_tag(buf, sizeof(buf), "announce_addr", option_list)) { av_strlcpy(announce_addr, buf, sizeof(announce_addr)); } } if (!announce_addr[0]) { struct addrinfo hints = { 0 }, *ai = NULL; hints.ai_family = AF_UNSPEC; if (getaddrinfo(host, NULL, &hints, &ai)) { av_log(s, AV_LOG_ERROR, "Unable to resolve %s\n", host); ret = AVERROR(EIO); goto fail; } if (ai->ai_family == AF_INET) { /* Also known as sap.mcast.net */ av_strlcpy(announce_addr, "224.2.127.254", sizeof(announce_addr)); #if HAVE_STRUCT_SOCKADDR_IN6 } else if (ai->ai_family == AF_INET6) { /* With IPv6, you can use the same destination in many different * multicast subnets, to choose how far you want it routed. * This one is intended to be routed globally. 
*/ av_strlcpy(announce_addr, "ff0e::2:7ffe", sizeof(announce_addr)); #endif } else { freeaddrinfo(ai); av_log(s, AV_LOG_ERROR, "Host %s resolved to unsupported " "address family\n", host); ret = AVERROR(EIO); goto fail; } freeaddrinfo(ai); } contexts = av_mallocz(sizeof(AVFormatContext*) * s->nb_streams); if (!contexts) { ret = AVERROR(ENOMEM); goto fail; } s->start_time_realtime = av_gettime(); for (i = 0; i < s->nb_streams; i++) { URLContext *fd; ff_url_join(url, sizeof(url), "rtp", NULL, host, base_port, "?ttl=%d", ttl); if (!same_port) base_port += 2; ret = ffurl_open(&fd, url, AVIO_FLAG_WRITE, &s->interrupt_callback, NULL); if (ret) { ret = AVERROR(EIO); goto fail; } s->streams[i]->priv_data = contexts[i] = ff_rtp_chain_mux_open(s, s->streams[i], fd, 0); av_strlcpy(contexts[i]->filename, url, sizeof(contexts[i]->filename)); } ff_url_join(url, sizeof(url), "udp", NULL, announce_addr, port, "?ttl=%d&connect=1", ttl); ret = ffurl_open(&sap->ann_fd, url, AVIO_FLAG_WRITE, &s->interrupt_callback, NULL); if (ret) { ret = AVERROR(EIO); goto fail; } udp_fd = ffurl_get_file_handle(sap->ann_fd); if (getsockname(udp_fd, (struct sockaddr*) &localaddr, &addrlen)) { ret = AVERROR(EIO); goto fail; } if (localaddr.ss_family != AF_INET #if HAVE_STRUCT_SOCKADDR_IN6 && localaddr.ss_family != AF_INET6 #endif ) { av_log(s, AV_LOG_ERROR, "Unsupported protocol family\n"); ret = AVERROR(EIO); goto fail; } sap->ann_size = 8192; sap->ann = av_mallocz(sap->ann_size); if (!sap->ann) { ret = AVERROR(EIO); goto fail; } sap->ann[pos] = (1 << 5); #if HAVE_STRUCT_SOCKADDR_IN6 if (localaddr.ss_family == AF_INET6) sap->ann[pos] |= 0x10; #endif pos++; sap->ann[pos++] = 0; /* Authentication length */ AV_WB16(&sap->ann[pos], av_get_random_seed()); pos += 2; if (localaddr.ss_family == AF_INET) { memcpy(&sap->ann[pos], &((struct sockaddr_in*)&localaddr)->sin_addr, sizeof(struct in_addr)); pos += sizeof(struct in_addr); #if HAVE_STRUCT_SOCKADDR_IN6 } else { memcpy(&sap->ann[pos], &((struct 
sockaddr_in6*)&localaddr)->sin6_addr, sizeof(struct in6_addr)); pos += sizeof(struct in6_addr); #endif } av_strlcpy(&sap->ann[pos], "application/sdp", sap->ann_size - pos); pos += strlen(&sap->ann[pos]) + 1; if (av_sdp_create(contexts, s->nb_streams, &sap->ann[pos], sap->ann_size - pos)) { ret = AVERROR_INVALIDDATA; goto fail; } av_freep(&contexts); av_log(s, AV_LOG_VERBOSE, "SDP:\n%s\n", &sap->ann[pos]); pos += strlen(&sap->ann[pos]); sap->ann_size = pos; if (sap->ann_size > sap->ann_fd->max_packet_size) { av_log(s, AV_LOG_ERROR, "Announcement too large to send in one " "packet\n"); goto fail; } return 0; fail: av_free(contexts); sap_write_close(s); return ret; }
/*
 * RTP muxer header setup — mid-era variant (CODEC_ID_* constants with
 * avpriv_set_pts_info(), s1->pb->max_packet_size, option-driven
 * payload type, Vorbis/Theora/VP8/G.722 cases).
 *
 * Accepts exactly one supported stream, resolves the payload type via
 * the private option or ff_rtp_get_payload_type(), randomizes the base
 * timestamp and SSRC with av_get_random_seed(), records the first RTCP
 * NTP time, and sizes the payload buffer from the protocol's max
 * packet size minus the 12-byte RTP header.  max_delay (if set)
 * bounds max_frames_per_packet from the audio frame size or video time
 * base.  Per-codec handling: MP2/MP3 reserve 4 payload-header bytes,
 * MPEG-TS aligns to whole TS packets, H.264 reads the NAL length size
 * from avcC extradata, Vorbis/Theora clamp frames-per-packet to 15 and
 * reserve 6 header bytes, VP8 only logs a spec-compatibility warning,
 * G.722 uses the historical 8 kHz clock (RFC 3551), AMR validates
 * TOC+payload fit and mono input.
 *
 * NOTE(review): the AMR case has no break and falls through into the
 * AAC case and then default (which sets the audio clock and buf_ptr);
 * Vorbis/Theora reach the same code via `goto defaultcase`.  The
 * fallthrough appears load-bearing — confirm upstream before changing.
 */
static int rtp_write_header(AVFormatContext *s1) { RTPMuxContext *s = s1->priv_data; int max_packet_size, n; AVStream *st; if (s1->nb_streams != 1) return -1; st = s1->streams[0]; if (!is_supported(st->codec->codec_id)) { av_log(s1, AV_LOG_ERROR, "Unsupported codec %x\n", st->codec->codec_id); return -1; } if (s->payload_type < 0) s->payload_type = ff_rtp_get_payload_type(s1, st->codec); s->base_timestamp = av_get_random_seed(); s->timestamp = s->base_timestamp; s->cur_timestamp = 0; s->ssrc = av_get_random_seed(); s->first_packet = 1; s->first_rtcp_ntp_time = ff_ntp_time(); if (s1->start_time_realtime) /* Round the NTP time to whole milliseconds. */ s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 + NTP_OFFSET_US; max_packet_size = s1->pb->max_packet_size; if (max_packet_size <= 12) return AVERROR(EIO); s->buf = av_malloc(max_packet_size); if (s->buf == NULL) { return AVERROR(ENOMEM); } s->max_payload_size = max_packet_size - 12; s->max_frames_per_packet = 0; if (s1->max_delay) { if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { if (st->codec->frame_size == 0) { av_log(s1, AV_LOG_ERROR, "Cannot respect max delay: frame size = 0\n"); } else { s->max_frames_per_packet = av_rescale_rnd(s1->max_delay, st->codec->sample_rate, AV_TIME_BASE * st->codec->frame_size, AV_ROUND_DOWN); } } if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { /* FIXME: We should round down here... 
*/ s->max_frames_per_packet = av_rescale_q(s1->max_delay, (AVRational){1, 1000000}, st->codec->time_base); } } avpriv_set_pts_info(st, 32, 1, 90000); switch(st->codec->codec_id) { case CODEC_ID_MP2: case CODEC_ID_MP3: s->buf_ptr = s->buf + 4; break; case CODEC_ID_MPEG1VIDEO: case CODEC_ID_MPEG2VIDEO: break; case CODEC_ID_MPEG2TS: n = s->max_payload_size / TS_PACKET_SIZE; if (n < 1) n = 1; s->max_payload_size = n * TS_PACKET_SIZE; s->buf_ptr = s->buf; break; case CODEC_ID_H264: /* check for H.264 MP4 syntax */ if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) { s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1; } break; case CODEC_ID_VORBIS: case CODEC_ID_THEORA: if (!s->max_frames_per_packet) s->max_frames_per_packet = 15; s->max_frames_per_packet = av_clip(s->max_frames_per_packet, 1, 15); s->max_payload_size -= 6; // ident+frag+tdt/vdt+pkt_num+pkt_length s->num_frames = 0; goto defaultcase; case CODEC_ID_VP8: av_log(s1, AV_LOG_ERROR, "RTP VP8 payload implementation is " "incompatible with the latest spec drafts.\n"); break; case CODEC_ID_ADPCM_G722: /* Due to a historical error, the clock rate for G722 in RTP is * 8000, even if the sample rate is 16000. See RFC 3551. */ avpriv_set_pts_info(st, 32, 1, 8000); break; case CODEC_ID_AMR_NB: case CODEC_ID_AMR_WB: if (!s->max_frames_per_packet) s->max_frames_per_packet = 12; if (st->codec->codec_id == CODEC_ID_AMR_NB) n = 31; else n = 61; /* max_header_toc_size + the largest AMR payload must fit */ if (1 + s->max_frames_per_packet + n > s->max_payload_size) { av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n"); return -1; } if (st->codec->channels != 1) { av_log(s1, AV_LOG_ERROR, "Only mono is supported\n"); return -1; } case CODEC_ID_AAC: s->num_frames = 0; default: defaultcase: if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { avpriv_set_pts_info(st, 32, 1, st->codec->sample_rate); } s->buf_ptr = s->buf; break; } return 0; }
$NetBSD$ --- ffserver.c.orig 2013-01-23 16:52:39.000000000 +0000 +++ ffserver.c @@ -4700,7 +4700,10 @@ int main(int argc, char **argv) av_lfg_init(&random_state, av_get_random_seed()); sigact.sa_handler = handle_child_exit; - sigact.sa_flags = SA_NOCLDSTOP | SA_RESTART; + sigact.sa_flags = SA_NOCLDSTOP; +#ifdef SA_RESTART + sigact.sa_flags |= SA_RESTART; +#endif sigaction(SIGCHLD, &sigact, 0); if (parse_ffconfig(config_filename) < 0) {
/*
 * RTP muxer header setup — AV_CODEC_ID-era variant with per-codec
 * CODEC_FLAG_BITEXACT sequence handling, av_get_audio_frame_duration()
 * based delay math, Opus and iLBC support, and a goto-fail cleanup.
 *
 * Accepts exactly one supported stream, resolves the payload type
 * (re-validating static types below RTP_PT_PRIVATE; the private option
 * overrides), randomizes base timestamp and (unless preset) SSRC via
 * av_get_random_seed(), and picks a random starting sequence number
 * confined to 0..0x0fff so SRTP doesn't wrap immediately (0 when the
 * codec has CODEC_FLAG_BITEXACT).  Sizes the packet buffer from
 * s1->packet_size clamped to the protocol max (must exceed the 12-byte
 * RTP header).  max_delay > 0 bounds max_frames_per_packet from the
 * audio frame duration (falling back to frame_size) or the video time
 * base.  Per-codec: MP2/MP3 reserve 4 payload-header bytes, MPEG-TS
 * aligns to TS packets, H.264 reads NAL length size from avcC,
 * Vorbis/Theora clamp to 15 frames and reserve 6 header bytes, G.722
 * uses the historical 8 kHz clock (RFC 3551), Opus forces the 48 kHz
 * clock and rejects >2 channels, iLBC validates the 38/50-byte block
 * size, AMR validates TOC+payload fit and mono input.
 *
 * NOTE(review): as in the older variants, the AMR case falls through
 * into AAC and default (`defaultcase:`), which set the audio clock and
 * buf_ptr — the fallthrough looks intentional; confirm upstream.
 */
static int rtp_write_header(AVFormatContext *s1) { RTPMuxContext *s = s1->priv_data; int n; AVStream *st; if (s1->nb_streams != 1) { av_log(s1, AV_LOG_ERROR, "Only one stream supported in the RTP muxer\n"); return AVERROR(EINVAL); } st = s1->streams[0]; if (!is_supported(st->codec->codec_id)) { av_log(s1, AV_LOG_ERROR, "Unsupported codec %s\n", avcodec_get_name(st->codec->codec_id)); return -1; } if (s->payload_type < 0) { /* Re-validate non-dynamic payload types */ if (st->id < RTP_PT_PRIVATE) st->id = ff_rtp_get_payload_type(s1, st->codec, -1); s->payload_type = st->id; } else { /* private option takes priority */ st->id = s->payload_type; } s->base_timestamp = av_get_random_seed(); s->timestamp = s->base_timestamp; s->cur_timestamp = 0; if (!s->ssrc) s->ssrc = av_get_random_seed(); s->first_packet = 1; s->first_rtcp_ntp_time = ff_ntp_time(); if (s1->start_time_realtime) /* Round the NTP time to whole milliseconds. */ s->first_rtcp_ntp_time = (s1->start_time_realtime / 1000) * 1000 + NTP_OFFSET_US; // Pick a random sequence start number, but in the lower end of the // available range, so that any wraparound doesn't happen immediately. // (Immediate wraparound would be an issue for SRTP.) 
/* Sequence init, buffer sizing, delay-derived frame limits, codec switch: */
if (s->seq < 0) { if (st->codec->flags & CODEC_FLAG_BITEXACT) { s->seq = 0; } else s->seq = av_get_random_seed() & 0x0fff; } else s->seq &= 0xffff; // Use the given parameter, wrapped to the right interval if (s1->packet_size) { if (s1->pb->max_packet_size) s1->packet_size = FFMIN(s1->packet_size, s1->pb->max_packet_size); } else s1->packet_size = s1->pb->max_packet_size; if (s1->packet_size <= 12) { av_log(s1, AV_LOG_ERROR, "Max packet size %d too low\n", s1->packet_size); return AVERROR(EIO); } s->buf = av_malloc(s1->packet_size); if (s->buf == NULL) { return AVERROR(ENOMEM); } s->max_payload_size = s1->packet_size - 12; s->max_frames_per_packet = 0; if (s1->max_delay > 0) { if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { int frame_size = av_get_audio_frame_duration(st->codec, 0); if (!frame_size) frame_size = st->codec->frame_size; if (frame_size == 0) { av_log(s1, AV_LOG_ERROR, "Cannot respect max delay: frame size = 0\n"); } else { AVRational avr1 ={ frame_size, st->codec->sample_rate }; s->max_frames_per_packet = av_rescale_q_rnd(s1->max_delay,AV_TIME_BASE_Q,avr1, AV_ROUND_DOWN); } } if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { /* FIXME: We should round down here... 
*/ AVRational avr2 = {1, 1000000}; s->max_frames_per_packet = av_rescale_q(s1->max_delay, avr2, st->codec->time_base); } } avpriv_set_pts_info(st, 32, 1, 90000); switch(st->codec->codec_id) { case AV_CODEC_ID_MP2: case AV_CODEC_ID_MP3: s->buf_ptr = s->buf + 4; break; case AV_CODEC_ID_MPEG1VIDEO: case AV_CODEC_ID_MPEG2VIDEO: break; case AV_CODEC_ID_MPEG2TS: n = s->max_payload_size / TS_PACKET_SIZE; if (n < 1) n = 1; s->max_payload_size = n * TS_PACKET_SIZE; s->buf_ptr = s->buf; break; case AV_CODEC_ID_H264: /* check for H.264 MP4 syntax */ if (st->codec->extradata_size > 4 && st->codec->extradata[0] == 1) { s->nal_length_size = (st->codec->extradata[4] & 0x03) + 1; } break; case AV_CODEC_ID_VORBIS: case AV_CODEC_ID_THEORA: if (!s->max_frames_per_packet) s->max_frames_per_packet = 15; s->max_frames_per_packet = av_clip(s->max_frames_per_packet, 1, 15); s->max_payload_size -= 6; // ident+frag+tdt/vdt+pkt_num+pkt_length s->num_frames = 0; goto defaultcase; case AV_CODEC_ID_ADPCM_G722: /* Due to a historical error, the clock rate for G722 in RTP is * 8000, even if the sample rate is 16000. See RFC 3551. */ avpriv_set_pts_info(st, 32, 1, 8000); break; case AV_CODEC_ID_OPUS: if (st->codec->channels > 2) { av_log(s1, AV_LOG_ERROR, "Multistream opus not supported in RTP\n"); goto fail; } /* The opus RTP RFC says that all opus streams should use 48000 Hz * as clock rate, since all opus sample rates can be expressed in * this clock rate, and sample rate changes on the fly are supported. 
*/ avpriv_set_pts_info(st, 32, 1, 48000); break; case AV_CODEC_ID_ILBC: if (st->codec->block_align != 38 && st->codec->block_align != 50) { av_log(s1, AV_LOG_ERROR, "Incorrect iLBC block size specified\n"); goto fail; } if (!s->max_frames_per_packet) s->max_frames_per_packet = 1; s->max_frames_per_packet = FFMIN(s->max_frames_per_packet, s->max_payload_size / st->codec->block_align); goto defaultcase; case AV_CODEC_ID_AMR_NB: case AV_CODEC_ID_AMR_WB: if (!s->max_frames_per_packet) s->max_frames_per_packet = 12; if (st->codec->codec_id == AV_CODEC_ID_AMR_NB) n = 31; else n = 61; /* max_header_toc_size + the largest AMR payload must fit */ if (1 + s->max_frames_per_packet + n > s->max_payload_size) { av_log(s1, AV_LOG_ERROR, "RTP max payload size too small for AMR\n"); goto fail; } if (st->codec->channels != 1) { av_log(s1, AV_LOG_ERROR, "Only mono is supported\n"); goto fail; } case AV_CODEC_ID_AAC: s->num_frames = 0; default: defaultcase: if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) { avpriv_set_pts_info(st, 32, 1, st->codec->sample_rate); } s->buf_ptr = s->buf; break; } return 0; fail: av_freep(&s->buf); return AVERROR(EINVAL); }