Example #1
static int
per_client_init(RTSPContext *ctx) {
	int i;
	AVOutputFormat *fmt;
	struct RTSPConf *rtspconf = rtspconf_global();
	// the RTP muxer is used to build the per-client SDP description
	if((fmt = av_guess_format("rtp", NULL, NULL)) == NULL) {
		ga_error("RTP not supported.\n");
		return -1;
	}
	if((ctx->sdp_fmtctx = avformat_alloc_context()) == NULL) {
		ga_error("create avformat context failed.\n");
		return -1;
	}
	ctx->sdp_fmtctx->oformat = fmt;
	// video streams: one per video source channel
	for(i = 0; i < video_source_channels(); i++) {
		if((ctx->sdp_vstream[i] = ga_avformat_new_stream(
			ctx->sdp_fmtctx,
			i, rtspconf->video_encoder_codec)) == NULL) {
			//
			ga_error("cannot create new video stream (%d:%d)\n",
				i, rtspconf->video_encoder_codec->id);
			return -1;
		}
		if((ctx->sdp_vencoder[i] = ga_avcodec_vencoder_init(
			ctx->sdp_vstream[i]->codec,
			rtspconf->video_encoder_codec,
			video_source_out_width(i), video_source_out_height(i),
			rtspconf->video_fps,
			rtspconf->vso)) == NULL) {
			//
			ga_error("cannot init video encoder\n");
			return -1;
		}
	}
	// audio stream
#ifdef ENABLE_AUDIO
	if((ctx->sdp_astream = ga_avformat_new_stream(
			ctx->sdp_fmtctx,
			video_source_channels(),
			rtspconf->audio_encoder_codec)) == NULL) {
		ga_error("cannot create new audio stream (%d)\n",
			rtspconf->audio_encoder_codec->id);
		return -1;
	}
	if((ctx->sdp_aencoder = ga_avcodec_aencoder_init(
			ctx->sdp_astream->codec,
			rtspconf->audio_encoder_codec,
			rtspconf->audio_bitrate,
			rtspconf->audio_samplerate,
			rtspconf->audio_channels,
			rtspconf->audio_codec_format,
			rtspconf->audio_codec_channel_layout)) == NULL) {
		ga_error("cannot init audio encoder\n");
		return -1;
	}
#endif
	// packet size (MTU): use the configured value, or fall back to the default
	if((ctx->mtu = ga_conf_readint("packet-size")) <= 0)
		ctx->mtu = RTSP_TCP_MAX_PACKET_SIZE;
	//
	return 0;
}
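
per_client_init() only prepares an AVFormatContext (ctx->sdp_fmtctx) with the RTP muxer and one stream per encoder; the SDP text itself still has to be generated from that context. Below is a minimal sketch of that step, assuming libavformat's av_sdp_create() is used to answer the RTSP DESCRIBE request; the helper name generate_client_sdp and the buffer handling are illustrative assumptions, not part of the original code.

#include <libavformat/avformat.h>

static int
generate_client_sdp(RTSPContext *ctx, char *sdpbuf, int buflen) {
	// av_sdp_create() walks the streams of the given format context(s)
	// and writes the corresponding SDP description into sdpbuf
	AVFormatContext *ac[1] = { ctx->sdp_fmtctx };
	if(av_sdp_create(ac, 1, sdpbuf, buflen) < 0) {
		ga_error("cannot create SDP description.\n");
		return -1;
	}
	return 0;
}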
Example #2
int
asource_init(void *arg) {
	int delay = 0;
	struct RTSPConf *rtspconf = rtspconf_global();
	if(initialized)		// file-scope flag: initialize only once
		return 0;
	// optional startup delay (in milliseconds) before opening the audio device
	if((delay = ga_conf_readint("audio-init-delay")) > 0) {
		usleep(delay*1000);
	}
	// fill in the capture parameters from the RTSP configuration
	audioparam.channels = rtspconf->audio_channels;
	audioparam.samplerate = rtspconf->audio_samplerate;
	if(rtspconf->audio_device_format == AV_SAMPLE_FMT_S16) {
#ifdef WIN32
		// no ALSA format constant is needed on the Windows (WASAPI) path
#else
		audioparam.format = SND_PCM_FORMAT_S16_LE;
#endif
		audioparam.bits_per_sample = 16;
	} else {
		ga_error("audio source: unsupported audio format (%d).\n",
			rtspconf->audio_device_format);
		return -1;
	}
	if(rtspconf->audio_device_channel_layout != AV_CH_LAYOUT_STEREO) {
		ga_error("audio source: unsupported channel layout (%llu).\n",
			rtspconf->audio_device_channel_layout);
		return -1;
	}
#ifdef WIN32
	if(ga_wasapi_init(&audioparam) < 0) {
		ga_error("WASAPI: initialization failed.\n");
		return -1;
	}
#else
	if((audioparam.handle = ga_alsa_init(&audioparam.sndlog)) == NULL) {
		ga_error("ALSA: initialization failed.\n");
		return -1;
	}
	if(ga_alsa_set_param(&audioparam) < 0) {
		ga_alsa_close(audioparam.handle, audioparam.sndlog);
		ga_error("ALSA: cannot set parameters\n");
		return -1;
	}
#endif
	if(audio_source_setup(audioparam.chunk_size,
			audioparam.samplerate,
			audioparam.bits_per_sample,
			audioparam.channels) < 0) {
		ga_error("audio source: setup failed.\n");
#ifdef WIN32
		ga_wasapi_close(&audioparam);
#else
		ga_alsa_close(audioparam.handle, audioparam.sndlog);
#endif
		return -1;
	}
	initialized = true;
	ga_error("audio source: setup chunk=%d, samplerate=%d, bps=%d, channels=%d\n",
		audioparam.chunk_size,
		audioparam.samplerate,
		audioparam.bits_per_sample,
		audioparam.channels);
	return 0;
}
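
After asource_init() succeeds, something has to read PCM chunks of audioparam.chunk_size frames from the opened device and push them downstream. Below is a minimal capture-loop sketch for the ALSA path only, assuming audioparam.handle is the snd_pcm_t * returned by ga_alsa_init() and that captured frames are handed to an audio_source_buffer_fill()-style consumer; the loop structure and the consumer name are assumptions, not taken from the original module.

#include <stdlib.h>
#include <errno.h>
#include <alsa/asoundlib.h>

// assumed downstream consumer; declared here only so the sketch compiles
extern int audio_source_buffer_fill(void *buf, int frames);

static void
alsa_capture_loop_sketch(snd_pcm_t *handle, int chunk_size, int channels) {
	// 16-bit interleaved samples: chunk_size frames x channels
	short *buf = (short*) malloc(sizeof(short) * chunk_size * channels);
	if(buf == NULL)
		return;
	while(1) {
		snd_pcm_sframes_t n = snd_pcm_readi(handle, buf, chunk_size);
		if(n == -EPIPE) {	// overrun: recover the stream and retry
			snd_pcm_prepare(handle);
			continue;
		}
		if(n < 0)		// unrecoverable read error
			break;
		// hand the n captured frames to the assumed consumer
		audio_source_buffer_fill(buf, (int) n);
	}
	free(buf);
}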