Example #1
sb_module_t *
module_load() {
	static sb_module_t m;
	//
	bzero(&m, sizeof(m));
	m.type = SB_MODULE_TYPE_VENCODER;
	m.name = strdup("Broadcom-VideoCore-H.264-encoder");
	m.mimetype = strdup("video/H264");
	m.init = vencoder_init;
	m.start = vencoder_start;
	//m.threadproc = vencoder_threadproc;
	m.stop = vencoder_stop;
	m.deinit = vencoder_deinit;
	//
	m.ioctl = vencoder_ioctl;
	return &m;
}
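The descriptor above is typically consumed by a module loader. A minimal sketch of such a loader, assuming the module is built as a shared object exporting module_load() (the helper name load_encoder_module is hypothetical; sb_module_t comes from the framework headers):

#include <stdio.h>
#include <dlfcn.h>

typedef sb_module_t * (*module_load_fn)(void);

static sb_module_t *
load_encoder_module(const char *path) {
	void *handle = dlopen(path, RTLD_NOW);
	if(handle == NULL) {
		fprintf(stderr, "dlopen: %s\n", dlerror());
		return NULL;
	}
	module_load_fn loader = (module_load_fn) dlsym(handle, "module_load");
	if(loader == NULL) {
		fprintf(stderr, "dlsym: %s\n", dlerror());
		dlclose(handle);
		return NULL;
	}
	// the descriptor is a static object inside the module, so the
	// handle must stay open for as long as the descriptor is in use
	return loader();
}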
Example #2
static int
vencoder_init(void *arg) {
	int iid;
	char *pipefmt = (char*) arg;
	struct RTSPConf *rtspconf = rtspconf_global();
	//
	if(rtspconf == NULL) {
		ga_error("video encoder: no configuration found\n");
		return -1;
	}
	if(vencoder_initialized != 0)
		return 0;
	//
	for(iid = 0; iid < video_source_channels(); iid++) {
		char pipename[64];
		int outputW, outputH;
		pipeline *pipe;
		//
		_sps[iid] = _pps[iid] = NULL;
		_spslen[iid] = _ppslen[iid] = 0;
		snprintf(pipename, sizeof(pipename), pipefmt, iid);
		outputW = video_source_out_width(iid);
		outputH = video_source_out_height(iid);
		if((pipe = pipeline::lookup(pipename)) == NULL) {
			ga_error("video encoder: pipe %s is not found\n", pipename);
			goto init_failed;
		}
		ga_error("video encoder: video source #%d from '%s' (%dx%d).\n",
			iid, pipe->name(), outputW, outputH, iid);
		//
		if(vpu_encoder_init(&vpu[iid], outputW, outputH, rtspconf->video_fps, 1,
				ga_conf_mapreadint("video-specific", "b") / 1000,
				ga_conf_mapreadint("video-specific", "g")) < 0)
			goto init_failed;
	}
	vencoder_initialized = 1;
	ga_error("video encoder: initialized (%d channels).\n", iid);
	return 0;
init_failed:
	vencoder_deinit(NULL);
	return -1;
}
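Note that pipefmt is a printf-style template expanded once per channel by the snprintf() call above. A hypothetical call site (the pipe name "video-%d" is an assumption, not taken from this example):

	if(vencoder_init((void *) "video-%d") < 0) {
		ga_error("video encoder: init failed.\n");
	}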
Example #3
static int
ca_create_swrctx(WAVEFORMATEX *w) {
	struct RTSPConf *rtspconf = rtspconf_global();
	int bufreq, samples;
	//
	if(swrctx != NULL)
		swr_free(&swrctx);
	if(audio_buf != NULL)
		free(audio_buf);
	//
	ga_error("CoreAudio: create swr context - format[%x] freq[%d] channels[%d]\n",
		w->wFormatTag, w->nSamplesPerSec, w->nChannels);
	//
	swrctx = swr_alloc_set_opts(NULL,
		rtspconf->audio_device_channel_layout,
		rtspconf->audio_device_format,
		rtspconf->audio_samplerate,
		CA2SWR_chlayout(w->nChannels),
		CA2SWR_format(w),
		w->nSamplesPerSec, 0, NULL);
	if(swrctx == NULL) {
		ga_error("CoreAudio: cannot create resample context.\n");
		return -1;
	} else {
		ga_error("CoreAudio: resample context (%x,%d,%d) -> (%x,%d,%d)\n",
			(int) CA2SWR_chlayout(w->nChannels),
			(int) CA2SWR_format(w),
			(int) w->nSamplesPerSec,
			(int) rtspconf->audio_device_channel_layout,
			(int) rtspconf->audio_device_format,
			(int) rtspconf->audio_samplerate);
	}
	if(swr_init(swrctx) < 0) {
		swr_free(&swrctx);
		swrctx = NULL;
		ga_error("CoreAudio: resample context init failed.\n");
		return -1;
	}
	// allocate the resample buffer
	ga_samplerate = rtspconf->audio_samplerate;
	ga_channels = av_get_channel_layout_nb_channels(rtspconf->audio_device_channel_layout);
	ca_samplerate = w->nSamplesPerSec;
	ca_bytes_per_sample = w->wBitsPerSample/8;
	samples = av_rescale_rnd(CA_MAX_SAMPLES,
			rtspconf->audio_samplerate, w->nSamplesPerSec, AV_ROUND_UP);
	bufreq = av_samples_get_buffer_size(NULL,
			rtspconf->audio_channels, samples*2,
			rtspconf->audio_device_format,
			1/*no-alignment*/);
	if((audio_buf = (unsigned char *) malloc(bufreq)) == NULL) {
		ga_error("CoreAudio: cannot allocate resample memory.\n");
		return -1;
	}
	if(audio_source_setup(bufreq, rtspconf->audio_samplerate,
				16/* depends on format */,
				rtspconf->audio_channels) < 0) {
		ga_error("CoreAudio: audio source setup failed.\n");
		return -1;
	}
	ga_error("CoreAudio: max %d samples with %d byte(s) resample buffer allocated.\n",
		samples, bufreq);
	//
	return 0;
}
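The buffer sizing above combines av_rescale_rnd() and av_samples_get_buffer_size(). The same arithmetic as a self-contained sketch (the doubled sample count mirrors the samples*2 headroom used in ca_create_swrctx()):

#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>

static int
resample_bufreq(int max_in_samples, int in_rate, int out_rate,
		int channels, enum AVSampleFormat fmt) {
	// e.g. 4096 samples at 44100 Hz resampled to 48000 Hz:
	// ceil(4096 * 48000 / 44100) = 4459 output samples
	int samples = av_rescale_rnd(max_in_samples, out_rate, in_rate, AV_ROUND_UP);
	// unaligned (align=1) byte requirement, doubled for headroom
	return av_samples_get_buffer_size(NULL, channels, samples * 2, fmt, 1);
}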
Example #4
void*
rtspserver(void *arg) {
#ifdef WIN32
	SOCKET s = *((SOCKET*) arg);
	int sinlen = sizeof(struct sockaddr_in);
#else
	int s = *((int*) arg);
	socklen_t sinlen = sizeof(struct sockaddr_in);
#endif
	const char *p;
	char buf[8192];
	char cmd[32], url[1024], protocol[32];
	int rlen;
	struct sockaddr_in sin;
	RTSPContext ctx;
	RTSPMessageHeader header1, *header = &header1;
	//int thread_ret;
	// image info
	//int iwidth = video_source_maxwidth(0);
	//int iheight = video_source_maxheight(0);
	//
	rtspconf = rtspconf_global();
	sinlen = sizeof(sin);
	getpeername(s, (struct sockaddr*) &sin, &sinlen);
	//
	bzero(&ctx, sizeof(ctx));
	if(per_client_init(&ctx) < 0) {
		ga_error("server initialization failed.\n");
		return NULL;
	}
	bcopy(&sin, &ctx.client, sizeof(ctx.client));
	ctx.state = SERVER_STATE_IDLE;
	// XXX: hasVideo is used to sync audio/video
	// It is increased by 1 for each captured frame until it is greater than zero;
	// once it is greater than zero, audio encoding starts ...
	//ctx.hasVideo = -(rtspconf->video_fps>>1);	// for slow encoders?
	ctx.hasVideo = 0;	// with 'zerolatency'
	pthread_mutex_init(&ctx.rtsp_writer_mutex, NULL);
	//
	ga_error("[tid %ld] client connected from %s:%d\n",
		ga_gettid(),
		inet_ntoa(sin.sin_addr), ntohs(sin.sin_port));
	//
	ctx.fd = s;
	//
	do {
		int i, fdmax, active;
		fd_set rfds;
		struct timeval to;
		FD_ZERO(&rfds);
		FD_SET(ctx.fd, &rfds);
		fdmax = ctx.fd;
#ifdef HOLE_PUNCHING
		for(i = 0; i < 2*ctx.streamCount; i++) {
			FD_SET(ctx.rtpSocket[i], &rfds);
			if(ctx.rtpSocket[i] > fdmax)
				fdmax = ctx.rtpSocket[i];
		}
#endif
		to.tv_sec = 0;
		to.tv_usec = 500000;
		if((active = select(fdmax+1, &rfds, NULL, NULL, &to)) < 0) {
			ga_error("select() failed: %s\n", strerror(errno));
			goto quit;
		}
		if(active == 0) {
			// try again!
			continue;
		}
#ifdef HOLE_PUNCHING
		for(i = 0; i < 2*ctx.streamCount; i++) {
			struct sockaddr_in xsin;
#ifdef WIN32
			int xsinlen = sizeof(xsin);
#else
			socklen_t xsinlen = sizeof(xsin);
#endif
			if(FD_ISSET(ctx.rtpSocket[i], &rfds) == 0)
				continue;
			recvfrom(ctx.rtpSocket[i], buf, sizeof(buf), 0,
				(struct sockaddr*) &xsin, &xsinlen);
			if(ctx.rtpPortChecked[i] != 0)
				continue;
			// XXX: port should not flip-flop, so check only once
			if(xsin.sin_addr.s_addr != ctx.client.sin_addr.s_addr) {
				ga_error("RTP: client address mismatched? %u.%u.%u.%u != %u.%u.%u.%u\n",
					NIPQUAD(ctx.client.sin_addr.s_addr),
					NIPQUAD(xsin.sin_addr.s_addr));
				continue;
			}
			if(xsin.sin_port != ctx.rtpPeerPort[i]) {
				ga_error("RTP: client port reconfigured: %u -> %u\n",
					(unsigned int) ntohs(ctx.rtpPeerPort[i]),
					(unsigned int) ntohs(xsin.sin_port));
				ctx.rtpPeerPort[i] = xsin.sin_port;
			} else {
				ga_error("RTP: client is not under an NAT, port %d confirmed\n",
					(int) ntohs(ctx.rtpPeerPort[i]));
			}
			ctx.rtpPortChecked[i] = 1;
		}
		// data arriving on the RTSP control connection?
		if(FD_ISSET(ctx.fd, &rfds) == 0)
			continue;
#endif
		// read commands
		if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) {
			goto quit;
		}
		// Interleaved binary data?
		if(buf[0] == '$') {
			handle_rtcp(&ctx, buf, rlen);
			continue;
		}
		// REQUEST line
		ga_error("%s", buf);
		p = buf;
		get_word(cmd, sizeof(cmd), &p);
		get_word(url, sizeof(url), &p);
		get_word(protocol, sizeof(protocol), &p);
		// check protocol
		if(strcmp(protocol, "RTSP/1.0") != 0) {
			rtsp_reply_error(&ctx, RTSP_STATUS_VERSION);
			goto quit;
		}
		// read headers
		bzero(header, sizeof(*header));
		do {
			int myseq = -1;
			char mysession[sizeof(header->session_id)] = "";
			if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0)
				goto quit;
			if(buf[0]=='\n' || (buf[0]=='\r' && buf[1]=='\n'))
				break;
#if 0
			ga_error("HEADER: %s", buf);
#endif
			// Special handling for the CSeq & Session headers:
			// ff_rtsp_parse_line does not handle them properly on Windows
			// (this workaround may no longer be necessary)
			if(strncasecmp("CSeq: ", buf, 6) == 0) {
				myseq = strtol(buf+6, NULL, 10);
			}
			if(strncasecmp("Session: ", buf, 9) == 0) {
				strncpy(mysession, buf+9, sizeof(mysession)-1);
			}
			//
			ff_rtsp_parse_line(header, buf, NULL, NULL);
			//
			if(myseq > 0 && header->seq <= 0) {
				ga_error("WARNING: CSeq fixes applied (%d->%d).\n",
					header->seq, myseq);
				header->seq = myseq;
			}
			if(mysession[0] != '\0' && header->session_id[0]=='\0') {
				unsigned i;
				for(i = 0; i < sizeof(header->session_id)-1; i++) {
					if(mysession[i] == '\0'
					|| isspace(mysession[i])
					|| mysession[i] == ';')
						break;
					header->session_id[i] = mysession[i];
				}
				header->session_id[i] = '\0';
				ga_error("WARNING: Session fixes applied (%s)\n",
					header->session_id);
			}
		} while(1);
		// special handling for session_id: strip trailing CR/LF
		if(header->session_id[0] != '\0') {
			char *p = header->session_id;
			while(*p != '\0') {
				if(*p == '\r' || *p == '\n') {
					*p = '\0';
					break;
				}
				p++;
			}
		}
		// handle commands
		ctx.seq = header->seq;
		if (!strcmp(cmd, "DESCRIBE"))
			rtsp_cmd_describe(&ctx, url);
		else if (!strcmp(cmd, "OPTIONS"))
			rtsp_cmd_options(&ctx, url);
		else if (!strcmp(cmd, "SETUP"))
			rtsp_cmd_setup(&ctx, url, header);
		else if (!strcmp(cmd, "PLAY"))
			rtsp_cmd_play(&ctx, url, header);
		else if (!strcmp(cmd, "PAUSE"))
			rtsp_cmd_pause(&ctx, url, header);
		else if (!strcmp(cmd, "TEARDOWN"))
			rtsp_cmd_teardown(&ctx, url, header, 1);
		else
			rtsp_reply_error(&ctx, RTSP_STATUS_METHOD);
		if(ctx.state == SERVER_STATE_TEARDOWN) {
			break;
		}
	} while(1);
quit:
	ctx.state = SERVER_STATE_TEARDOWN;
	//
	close(ctx.fd);
	// 2014-05-20: support only share-encoder model
	ff_server_unregister_client(&ctx);
	//
	per_client_deinit(&ctx);
	//ga_error("RTSP client thread terminated (%d/%d clients left).\n",
	//	video_source_client_count(), audio_source_client_count());
	ga_error("RTSP client thread terminated.\n");
	//
	return NULL;
}
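The buf[0] == '$' test above detects interleaved binary data as defined by RFC 2326, section 10.12: a '$' byte, a one-byte channel identifier, and a two-byte payload length in network byte order, followed by the payload. A minimal sketch of parsing that 4-byte prefix (parse_interleaved is a hypothetical helper, not part of this server):

static int
parse_interleaved(const unsigned char *buf, int buflen,
		int *channel, int *payload_len) {
	if(buflen < 4 || buf[0] != '$')
		return -1;
	*channel = buf[1];				// RTP/RTCP channel id from SETUP
	*payload_len = (buf[2] << 8) | buf[3];		// big-endian payload length
	return 0;
}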
Example #5
/// TODO
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int cid;
	struct RTSPConf *rtspconf = NULL;
	//
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	struct timeval pkttv;
	//
	int video_written = 0;
	//
	rtspconf = rtspconf_global();
	cid = 0;
	// start encoding
	if(omx_streamer_start(&omxe[cid]) < 0) {
		sb_error("video encoder: start streamer failed.\n");
		goto video_quit;
	}
	//
	sb_error("video encoding started: tid=%ld.\n", sb_gettid());
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		//
		AVPacket pkt;
		unsigned char *enc;
		int encsize;
		// handle pts
		if(basePts == -1LL) {
			basePts = omxe[cid].frame_out;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + omxe[cid].frame_out - basePts;
		}
		//
		if((enc = omx_streamer_get(&omxe[cid], &encsize)) == NULL) {
			sb_error("video encoder: encode failed.\n");
			break;
		}
		if(encsize == 0)
			continue;
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// send packet
#ifdef SAVEFILE
		if(fout != NULL)
			fwrite(enc, sizeof(char), encsize, fout);
#endif
#if 0		// XXX: data check
		do {
			unsigned char *ptr, *nextptr;
			int offset, nextoffset, left = encsize;
			sb_error("XXX: %d bytes encoded ---------------\n", encsize);
			if((ptr = sb_find_startcode(enc, enc+encsize, &offset)) != enc) {
				sb_error("XXX: cannot find a start code\n");
				break;
			}
			while(ptr != NULL) {
				int nalsize;
				nextptr = sb_find_startcode(ptr+4, enc+encsize, &nextoffset);
				nalsize = nextptr != NULL ? nextptr-ptr : enc+encsize-ptr;
				sb_error("XXX: nal_t=%d, size=%d\n", ptr[offset] & 0x1f, nalsize);
				ptr = nextptr;
				offset = nextoffset;
			}
		} while(0);
#endif
		av_init_packet(&pkt);
		pkt.data = enc;
		pkt.size = encsize;
		pkt.pts = pts;
		if(encoder_send_packet("video-encoder", cid, &pkt, pkt.pts, NULL/*&pkttv*/) < 0) {
			goto video_quit;
		}
		if(video_written == 0) {
			video_written = 1;
			sb_error("first video frame written (pts=%lld)\n", pts);
		}
	}
	//
video_quit:
	//
	if(omx_streamer_prepare_stop(&omxe[cid]) < 0) {
		sb_error("streamer: prepare stop failed.\n");
	}
	//
	if(omx_streamer_stop(&omxe[cid]) < 0) {
		sb_error("streamer: start streamer failed.\n");
	}
	//
	sb_error("video encoder: thread terminated (tid=%ld).\n", sb_gettid());
	//
	return NULL;
}
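The disabled data-check block above walks the encoded buffer with sb_find_startcode(). That routine is not shown here; a minimal Annex-B scanner with the same calling convention (return the start-code position and report its length via *offset, so that ptr[offset] is the NAL header byte) might look like this sketch:

static unsigned char *
find_startcode(unsigned char *p, unsigned char *end, int *offset) {
	// search for 00 00 01 or 00 00 00 01
	for(; p + 2 < end; p++) {
		if(p[0] != 0 || p[1] != 0)
			continue;
		if(p[2] == 1) {
			*offset = 3;
			return p;
		}
		if(p[2] == 0 && p + 3 < end && p[3] == 1) {
			*offset = 4;
			return p;
		}
	}
	return NULL;
}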
Example #6
static int
vencoder_init(void *arg) {
	int iid, width, height, fps, bitrate, gopsize;
	int vsmode[3];
	char *pipefmt = (char*) arg;
	struct RTSPConf *rtspconf = rtspconf_global();
	omx_streamer_config_t sc;
	//
	if(rtspconf == NULL) {
		sb_error("video encoder: no configuration found\n");
		return -1;
	}
	if(vencoder_initialized != 0)
		return 0;
	//
	iid = 0;
	//
	if(sb_conf_readints("video-source-v4l-mode", vsmode, 3) == 3) {
		sb_error("video encoder: use user config: %d %d %d\n",
				vsmode[0], vsmode[1], vsmode[2]);
		width = vsmode[1];
		height = vsmode[2];
	} else {
		width = 640;
		height = 480;
	}
	if((fps = sb_conf_readint("video-fps")) <= 1) {
		fps = 24;
	}
	if((bitrate = sb_conf_mapreadint("video-specific", "b")) < 1000000) {
		bitrate = 3000000;
	}
	if((gopsize = sb_conf_mapreadint("video-specific", "g")) < 5) {
		gopsize = fps;
	}
	// search for photo seq
	if(init_image_storage() < 0) {
		return -1;
	}
	// register a dummy video source: only once
	if(vsource_initialized == 0) {
		vsource_config_t config;
		bzero(&config, sizeof(config));
		config.curr_width = width;
		config.curr_height = height;
		config.curr_stride = width;
		if(video_source_setup_ex(&config, 1) < 0) {
			sb_error("video encoder: setup dummy source failed (%dx%d)\n", width, height);
			return -1;
		}
		sb_error("video encoder: dummy source configured (%dx%d)\n", width, height);
		vsource_initialized = 1;
	}
	//
	sb_error("video encoder: mode=(ignored) (%dx%d), fps=%d\n",
		width, height, fps);
	// load configs
	bzero(&sc, sizeof(sc));
	sc.camera_sharpness = sb_omx_load_int("omx-camera-sharpness", -100, 100, OSCAM_DEF_SHARPNESS);
	sc.camera_contrast = sb_omx_load_int("omx-camera-contrast", -100, 100, OSCAM_DEF_CONTRAST);
	sc.camera_brightness = sb_omx_load_int("omx-camera-brightness", 0, 100, OSCAM_DEF_BRIGHTNESS);
	sc.camera_saturation = sb_omx_load_int("omx-camera-saturation", -100, 100, OSCAM_DEF_SATURATION);
	sc.camera_ev = sb_omx_load_int("omx-camera-ev", -10, 10, OSCAM_DEF_EXPOSURE_VALUE_COMPENSATION);
	sc.camera_iso = sb_omx_load_int("omx-camera-iso", 100, 800, OSCAM_DEF_EXPOSURE_ISO_SENSITIVITY);
	sc.camera_iso_auto = sb_omx_load_bool("omx-camera-iso-auto", OSCAM_DEF_EXPOSURE_AUTO_SENSITIVITY);
	sc.camera_frame_stabilisation = sb_omx_load_bool("omx-camera-frame-stabilisation", OSCAM_DEF_FRAME_STABILISATION);
	sc.camera_flip_horizon = sb_omx_load_bool("omx-camera-flip-horizon", OSCAM_DEF_FLIP_HORIZONTAL);
	sc.camera_flip_vertical = sb_omx_load_bool("omx-camera-flip-vertical", OSCAM_DEF_FLIP_VERTICAL);
	sc.camera_whitebalance =
		(enum OMX_WHITEBALCONTROLTYPE) sb_omx_load_int("camera-omx-whitebalance", 0, (int) OMX_WhiteBalControlMax, (int) OSCAM_DEF_WHITE_BALANCE_CONTROL);
	sc.camera_filter =
		(enum OMX_IMAGEFILTERTYPE) sb_omx_load_int("camera-omx-filter", (int) OMX_ImageFilterNone, (int) OMX_ImageFilterMax, (int) OSCAM_DEF_IMAGE_FILTER);
	//
	if(omx_streamer_init(&omxe[iid], &sc, width, height, fps, 1, bitrate, gopsize) < 0) {
		sb_error("video encoder: init failed.\n");
		goto init_failed;
	}
	// register a dummy control
	ctrl_server_setreplay(camera_control);
	sb_error("video encoder: dummy control enabled.\n");
	//
	vencoder_initialized = 1;
	sb_error("video encoder: initialized (%d channels).\n", iid+1);
	return 0;
init_failed:
	vencoder_deinit(NULL);
	return -1;
}
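The sb_omx_load_int()/sb_omx_load_bool() helpers used above are not part of this example. A plausible reading is "fetch an integer from the config, clamp it into [min, max], and fall back to a default when the key is absent"; a sketch under the assumption that sb_conf_readint() returns 0 for missing keys (load_int_clamped is hypothetical):

static int
load_int_clamped(const char *key, int minval, int maxval, int defval) {
	int v = sb_conf_readint(key);	// assumed: returns 0 when the key is unset
	if(v == 0)
		return defval;
	if(v < minval)
		v = minval;
	if(v > maxval)
		v = maxval;
	return v;
}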
Example #7
int
asource_init(void *arg) {
	int delay = 0;
	struct RTSPConf *rtspconf = rtspconf_global();
	if(initialized)
		return 0;
	//
	if((delay = ga_conf_readint("audio-init-delay")) > 0) {
		usleep(delay*1000);
	}
	//
	audioparam.channels = rtspconf->audio_channels;
	audioparam.samplerate = rtspconf->audio_samplerate;
	if(rtspconf->audio_device_format == AV_SAMPLE_FMT_S16) {
#ifdef WIN32
#else
		audioparam.format = SND_PCM_FORMAT_S16_LE;
#endif
		audioparam.bits_per_sample = 16;
	} else {
		ga_error("audio source: unsupported audio format (%d).\n",
			rtspconf->audio_device_format);
		return -1;
	}
	if(rtspconf->audio_device_channel_layout != AV_CH_LAYOUT_STEREO) {
		ga_error("audio source: unsupported channel layout (%llu).\n",
			rtspconf->audio_device_channel_layout);
		return -1;
	}
#ifdef WIN32
	if(ga_wasapi_init(&audioparam) < 0) {
		ga_error("WASAPI: initialization failed.\n");
		return -1;
	}
#else
	if((audioparam.handle = ga_alsa_init(&audioparam.sndlog)) == NULL) {
		ga_error("ALSA: initialization failed.\n");
		return -1;
	}
	if(ga_alsa_set_param(&audioparam) < 0) {
		ga_alsa_close(audioparam.handle, audioparam.sndlog);
		ga_error("ALSA: cannot set parameters\n");
		return -1;
	}
#endif
	if(audio_source_setup(audioparam.chunk_size, audioparam.samplerate, audioparam.bits_per_sample, audioparam.channels) < 0) {
		ga_error("audio source: setup failed.\n");
#ifdef WIN32
		ga_wasapi_close(&audioparam);
#else
		ga_alsa_close(audioparam.handle, audioparam.sndlog);
#endif
		return -1;
	}
	initialized = true;
	ga_error("audio source: setup chunk=%d, samplerate=%d, bps=%d, channels=%d\n",
		audioparam.chunk_size,
		audioparam.samplerate,
		audioparam.bits_per_sample,
		audioparam.channels);
	return 0;
}
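The init path above deliberately accepts only AV_SAMPLE_FMT_S16 with a stereo layout. If more formats were needed, the check could be factored into a mapping; a sketch of the ALSA side (type and constant names from alsa/asoundlib.h and libavutil/samplefmt.h):

#include <alsa/asoundlib.h>
#include <libavutil/samplefmt.h>

static int
av_to_alsa_format(enum AVSampleFormat fmt, snd_pcm_format_t *out, int *bits) {
	switch(fmt) {
	case AV_SAMPLE_FMT_U8:	*out = SND_PCM_FORMAT_U8;	*bits = 8;	return 0;
	case AV_SAMPLE_FMT_S16:	*out = SND_PCM_FORMAT_S16_LE;	*bits = 16;	return 0;
	case AV_SAMPLE_FMT_S32:	*out = SND_PCM_FORMAT_S32_LE;	*bits = 32;	return 0;
	default:
		return -1;	// planar and float formats would need resampling first
	}
}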
Example #8
void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipe
	// image info
	int iid;
	int iwidth;
	int iheight;
	int rtp_id;
	struct pooldata *data = NULL;
	struct vsource_frame *frame = NULL;
	pipeline *pipe = (pipeline*) arg;
	AVCodecContext *encoder = NULL;
	//
	AVFrame *pic_in = NULL;
	unsigned char *pic_in_buf = NULL;
	int pic_in_size;
	unsigned char *nalbuf = NULL, *nalbuf_a = NULL;
	int nalbuf_size = 0, nalign = 0;
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int resolution[2];
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: NULL pipeline specified.\n");
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	// init variables
	iid = ((struct vsource_config*) pipe->get_privdata())->id;
	iwidth = video_source_maxwidth(iid);
	iheight = video_source_maxheight(iid);
	rtp_id = ((struct vsource_config*) pipe->get_privdata())->rtp_id;
	//
	outputW = iwidth;	// by default, the same as max resolution
	outputH = iheight;
	if(ga_conf_readints("output-resolution", resolution, 2) == 2) {
		outputW = resolution[0];
		outputH = resolution[1];
	}
	//
	ga_error("video encoder: image source from '%s' (%dx%d) via channel %d, resolution=%dx%d.\n",
		pipe->name(), iwidth, iheight, rtp_id, outputW, outputH);
	//
	encoder = ga_avcodec_vencoder_init(
			NULL,
			rtspconf->video_encoder_codec,
			outputW, outputH,
			rtspconf->video_fps,
			rtspconf->vso);
	if(encoder == NULL) {
		ga_error("video encoder: cannot initialized the encoder.\n");
		goto video_quit;
	}
	//
	nalbuf_size = 100000 + 12 * outputW * outputH;
	if(ga_malloc(nalbuf_size, (void**) &nalbuf, &nalign) < 0) {
		ga_error("video encoder: buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	nalbuf_a = nalbuf + nalign;
	//
	if((pic_in = avcodec_alloc_frame()) == NULL) {
		ga_error("video encoder: picture allocation failed, terminated.\n");
		goto video_quit;
	}
	pic_in_size = avpicture_get_size(PIX_FMT_YUV420P, outputW, outputH);
	if((pic_in_buf = (unsigned char*) av_malloc(pic_in_size)) == NULL) {
		ga_error("video encoder: picture buffer allocation failed, terminated.\n");
		goto video_quit;
	}
	avpicture_fill((AVPicture*) pic_in, pic_in_buf,
			PIX_FMT_YUV420P, outputW, outputH);
	//ga_error("video encoder: linesize = %d|%d|%d\n", pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
	// start encoding
	ga_error("video encoding started: tid=%ld %dx%d@%dfps, nalbuf_size=%d, pic_in_size=%d.\n",
		ga_gettid(),
		iwidth, iheight, rtspconf->video_fps,
		nalbuf_size, pic_in_size);
	//
	pipe->client_register(ga_gettid(), &cond);
	//
	while(encoder_running() > 0) {
		AVPacket pkt;
		int got_packet = 0;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (struct vsource_frame*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// XXX: assume always YUV420P
		if(pic_in->linesize[0] == frame->linesize[0]
		&& pic_in->linesize[1] == frame->linesize[1]
		&& pic_in->linesize[2] == frame->linesize[2]) {
			bcopy(frame->imgbuf, pic_in_buf, pic_in_size);
		} else {
			ga_error("video encoder: YUV mode failed - mismatched linesize(s) (src:%d,%d,%d; dst:%d,%d,%d)\n",
				frame->linesize[0], frame->linesize[1], frame->linesize[2],
				pic_in->linesize[0], pic_in->linesize[1], pic_in->linesize[2]);
			pipe->release_data(data);
			goto video_quit;
		}
		pipe->release_data(data);
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// encode
		pic_in->pts = pts;
		av_init_packet(&pkt);
		pkt.data = nalbuf_a;
		pkt.size = nalbuf_size;
		if(avcodec_encode_video2(encoder, &pkt, pic_in, &got_packet) < 0) {
			ga_error("video encoder: encode failed, terminated.\n");
			goto video_quit;
		}
		if(got_packet) {
			if(pkt.pts == (int64_t) AV_NOPTS_VALUE) {
				pkt.pts = pts;
			}
			pkt.stream_index = 0;
			// send the packet
			if(encoder_send_packet_all("video-encoder",
				rtp_id/*rtspconf->video_id*/, &pkt,
				pkt.pts) < 0) {
				goto video_quit;
			}
			// free unused side-data
			if(pkt.side_data_elems > 0) {
				int i;
				for (i = 0; i < pkt.side_data_elems; i++)
					av_free(pkt.side_data[i].data);
				av_freep(&pkt.side_data);
				pkt.side_data_elems = 0;
			}
			//
			if(video_written == 0) {
				video_written = 1;
				ga_error("first video frame written (pts=%lld)\n", pts);
			}
		}
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	if(pic_in_buf)	av_free(pic_in_buf);
	if(pic_in)	av_free(pic_in);
	if(nalbuf)	free(nalbuf);
	if(encoder)	ga_avcodec_close(encoder);
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
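The pts handling in this loop pins basePts to the first frame's capture timestamp and ptsSync to the encoder clock, then offsets every later frame by its distance from basePts. The same bookkeeping as a standalone sketch (encoder_pts_sync() belongs to the surrounding framework and is assumed to return the current encoder pts for the given frame rate):

static long long
map_pts(long long imgpts, long long *basePts, long long *ptsSync, int fps) {
	if(*basePts == -1LL) {		// first frame: establish the mapping
		*basePts = imgpts;
		*ptsSync = encoder_pts_sync(fps);
		return *ptsSync;
	}
	return *ptsSync + imgpts - *basePts;
}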
Example #9
void*
rtspserver(void *arg) {
#ifdef WIN32
	SOCKET s = *((SOCKET*) arg);
	int sinlen = sizeof(struct sockaddr_in);
#else
	int s = *((int*) arg);
	socklen_t sinlen = sizeof(struct sockaddr_in);
#endif
	const char *p;
	char buf[8192];
	char cmd[32], url[1024], protocol[32];
	int rlen;
	struct sockaddr_in sin;
	RTSPContext ctx;
	RTSPMessageHeader header1, *header = &header1;
	int thread_ret;
	// image info
	int iwidth = video_source_width(0);
	int iheight = video_source_height(0);
	//
	rtspconf = rtspconf_global();
	sinlen = sizeof(sin);
	getpeername(s, (struct sockaddr*) &sin, &sinlen);
	//
	bzero(&ctx, sizeof(ctx));
	if(per_client_init(&ctx) < 0) {
		ga_error("server initialization failed.\n");
		return NULL;
	}
	ctx.state = SERVER_STATE_IDLE;
	// XXX: hasVideo is used to sync audio/video
	// It is increased by 1 for each captured frame until it is greater than zero;
	// once it is greater than zero, audio encoding starts ...
	//ctx.hasVideo = -(rtspconf->video_fps>>1);	// for slow encoders?
	ctx.hasVideo = 0;	// with 'zerolatency'
	pthread_mutex_init(&ctx.rtsp_writer_mutex, NULL);
#if 0
	ctx.audioparam.channels = rtspconf->audio_channels;
	ctx.audioparam.samplerate = rtspconf->audio_samplerate;
	if(rtspconf->audio_device_format == AV_SAMPLE_FMT_S16) {
#ifdef WIN32
#else
		ctx.audioparam.format = SND_PCM_FORMAT_S16_LE;
#endif
		ctx.audioparam.bits_per_sample = 16;
	}
	//
	ga_error("INFO: image: %dx%d; audio: %d ch 16-bit pcm @ %dHz\n",
			iwidth, iheight,
			ctx.audioparam.channels,
			ctx.audioparam.samplerate);
#endif
	//
#if 0
#ifdef WIN32
	if(ga_wasapi_init(&ctx.audioparam) < 0) {
		ga_error("cannot init wasapi.\n");
		return NULL;
	}
#else
	if((ctx.audioparam.handle = ga_alsa_init(&ctx.audioparam.sndlog)) == NULL) {
		ga_error("cannot init alsa.\n");
		return NULL;
	}
	if(ga_alsa_set_param(&ctx.audioparam) < 0) {
		ga_error("cannot set alsa parameter\n");
		return NULL;
	}
#endif
#endif
	//
	ga_error("[tid %ld] client connected from %s:%d\n",
		ga_gettid(),
		inet_ntoa(sin.sin_addr), ntohs(sin.sin_port));
	//
	ctx.fd = s;
	//
	do {
		fd_set rfds;
		FD_ZERO(&rfds);
		FD_SET(ctx.fd, &rfds);
		if(select(ctx.fd+1, &rfds, NULL, NULL, NULL) <= 0) {
			ga_error("select() failed: %s\n", strerror(errno));
			goto quit;
		}
		// read commands
		if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0) {
			goto quit;
		}
		// Interleaved binary data?
		if(buf[0] == '$') {
			handle_rtcp(&ctx, buf, rlen);
			continue;
		}
		// REQUEST line
		ga_error("%s", buf);
		p = buf;
		get_word(cmd, sizeof(cmd), &p);
		get_word(url, sizeof(url), &p);
		get_word(protocol, sizeof(protocol), &p);
		// check protocol
		if(strcmp(protocol, "RTSP/1.0") != 0) {
			rtsp_reply_error(&ctx, RTSP_STATUS_VERSION);
			goto quit;
		}
		// read headers
		bzero(header, sizeof(*header));
		do {
			int myseq = -1;
			char mysession[sizeof(header->session_id)] = "";
			if((rlen = rtsp_getnext(&ctx, buf, sizeof(buf))) < 0)
				goto quit;
			if(buf[0]=='\n' || (buf[0]=='\r' && buf[1]=='\n'))
				break;
#if 0
			ga_error("HEADER: %s", buf);
#endif
			// Special handling for the CSeq & Session headers:
			// ff_rtsp_parse_line does not handle them properly on Windows
			// (this workaround may no longer be necessary)
			if(strncasecmp("CSeq: ", buf, 6) == 0) {
				myseq = strtol(buf+6, NULL, 10);
			}
			if(strncasecmp("Session: ", buf, 9) == 0) {
				strncpy(mysession, buf+9, sizeof(mysession)-1);
			}
			//
			ff_rtsp_parse_line(header, buf, NULL, NULL);
			//
			if(myseq > 0 && header->seq <= 0) {
				ga_error("WARNING: CSeq fixes applied (%d->%d).\n",
					header->seq, myseq);
				header->seq = myseq;
			}
			if(mysession[0] != '\0' && header->session_id[0]=='\0') {
				unsigned i;
				for(i = 0; i < sizeof(header->session_id)-1; i++) {
					if(mysession[i] == '\0'
					|| isspace(mysession[i])
					|| mysession[i] == ';')
						break;
					header->session_id[i] = mysession[i];
				}
				header->session_id[i] = '\0';
				ga_error("WARNING: Session fixes applied (%s)\n",
					header->session_id);
			}
		} while(1);
		// special handling for session_id: strip trailing CR/LF
		if(header->session_id[0] != '\0') {
			char *p = header->session_id;
			while(*p != '\0') {
				if(*p == '\r' || *p == '\n') {
					*p = '\0';
					break;
				}
				p++;
			}
		}
		// handle commands
		ctx.seq = header->seq;
		if (!strcmp(cmd, "DESCRIBE"))
			rtsp_cmd_describe(&ctx, url);
		else if (!strcmp(cmd, "OPTIONS"))
			rtsp_cmd_options(&ctx, url);
		else if (!strcmp(cmd, "SETUP"))
			rtsp_cmd_setup(&ctx, url, header);
		else if (!strcmp(cmd, "PLAY"))
			rtsp_cmd_play(&ctx, url, header);
		else if (!strcmp(cmd, "PAUSE"))
			rtsp_cmd_pause(&ctx, url, header);
		else if (!strcmp(cmd, "TEARDOWN"))
			rtsp_cmd_teardown(&ctx, url, header);
		else
			rtsp_reply_error(&ctx, RTSP_STATUS_METHOD);
		if(ctx.state == SERVER_STATE_TEARDOWN) {
			break;
		}
	} while(1);
quit:
	ctx.state = SERVER_STATE_TEARDOWN;
	//
	close(ctx.fd);
#ifdef	SHARE_ENCODER
	encoder_unregister_client(&ctx);
#else
	ga_error("connection closed, checking for worker threads...\n");
#if 0
	//
	if(ctx.vthreadId != 0) {
		video_source_notify_one(ctx.vthreadId);
	}
#endif
	pthread_join(ctx.vthread, (void**) &thread_ret);
#ifdef	ENABLE_AUDIO
	pthread_join(ctx.athread, (void**) &thread_ret);
#endif	/* ENABLE_AUDIO */
#endif	/* SHARE_ENCODER */
	//
	per_client_deinit(&ctx);
	//ga_error("RTSP client thread terminated (%d/%d clients left).\n",
	//	video_source_client_count(), audio_source_client_count());
	ga_error("RTSP client thread terminated.\n");
	//
	return NULL;
}
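get_word(), used to split the request line above, comes from the surrounding RTSP code. A minimal sketch with the same calling convention (copy the next whitespace-delimited token into buf and advance *pp past it):

#include <ctype.h>

static void
get_word_sketch(char *buf, int buf_size, const char **pp) {
	const char *p = *pp;
	char *q = buf;
	while(isspace((unsigned char) *p))
		p++;
	while(*p != '\0' && !isspace((unsigned char) *p)) {
		if(q - buf < buf_size - 1)	// truncate silently on overflow
			*q++ = *p;
		p++;
	}
	*q = '\0';
	*pp = p;
}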
Example #10
/// TODO
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int cid;
	pooldata_t *data = NULL;
	vsource_frame_t *frame = NULL;
	char *pipename = (char*) arg;
	pipeline *pipe = pipeline::lookup(pipename);
	struct RTSPConf *rtspconf = NULL;
	//
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int outputW, outputH;
	//
	struct timeval pkttv;
#ifdef PRINT_LATENCY
	struct timeval ptv;
#endif
	//
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: invalid pipeline specified (%s).\n", pipename);
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	cid = ((vsource_t*) pipe->get_privdata())->channel;
	outputW = video_source_out_width(cid);
	outputH = video_source_out_height(cid);
	//
	// start encoding
	ga_error("video encoding started: tid=%ld.\n", ga_gettid());
	pipe->client_register(ga_gettid(), &cond);
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		//
		AVPacket pkt;
		unsigned char *enc;
		int encsize;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (vsource_frame_t*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// encode!
		gettimeofday(&pkttv, NULL);
		enc = vpu_encoder_encode(&vpu[cid], frame->imgbuf, vpu[cid].vpu_framesize, &encsize);
		//
		pipe->release_data(data);
		//
		if(enc == NULL) {
			ga_error("encoder-vpu: encode failed.\n");
			goto video_quit;
		}
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// send packet
#ifdef SAVEFILE
		if(fout != NULL)
			fwrite(enc, sizeof(char), encsize, fout);
#endif
		av_init_packet(&pkt);
		pkt.data = enc;
		pkt.size = encsize;
		pkt.pts = pts;
		if(encoder_send_packet_all("video-encoder", cid, &pkt, pkt.pts, &pkttv) < 0) {
			goto video_quit;
		}
		if(video_written == 0) {
			video_written = 1;
			ga_error("first video frame written (pts=%lld)\n", pts);
		}
#ifdef PRINT_LATENCY		/* print out latency */
		gettimeofday(&ptv, NULL);
		ga_aggregated_print(0x0001, 601, tvdiff_us(&ptv, &frame->timestamp));
#endif
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
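The PRINT_LATENCY branch feeds tvdiff_us() with the frame's capture timestamp and the time right after the packet is sent. tvdiff_us() is framework code; the usual microsecond difference between two struct timeval values looks like this sketch:

#include <sys/time.h>

static long long
tvdiff_us_sketch(const struct timeval *t1, const struct timeval *t0) {
	return (t1->tv_sec - t0->tv_sec) * 1000000LL
		+ (t1->tv_usec - t0->tv_usec);
}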
Example #11
void *
rtspserver_main(void *arg) {
#ifdef WIN32
	SOCKET s, cs;
	int csinlen;
#else
	int s, cs;
	socklen_t csinlen;
#endif
	struct sockaddr_in sin, csin;
	struct RTSPConf *conf = rtspconf_global();
	//
	if((s = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) < 0) {
		perror("socket");
		return (void *) -1;
	}
	//
	do {
#ifdef WIN32
		BOOL val = 1;
		setsockopt(s, SOL_SOCKET, SO_REUSEADDR, (const char*) &val, sizeof(val));
#else
		int val = 1;
		setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &val, sizeof(val));
#endif
	} while(0);
	//
	bzero(&sin, sizeof(sin));
	sin.sin_family = AF_INET;
	sin.sin_port = htons(conf->serverport);
	//
	if(bind(s, (struct sockaddr*) &sin, sizeof(sin)) < 0) {
		perror("bind");
		return (void *) -1;
	}
	if(listen(s, 256) < 0) {
		perror("listen");
		return (void *) -1;
	}
	//
	do {
		pthread_t thread;
		//
		csinlen = sizeof(csin);
		bzero(&csin, sizeof(csin));
		if((cs = accept(s, (struct sockaddr*) &csin, &csinlen)) < 0) {
			perror("accept");
			return (void *) -1;
		}
		// tune the TCP send buffer
		do {
			int sndwnd = 8388608;	// 8MB
			if(setsockopt(cs, SOL_SOCKET, SO_SNDBUF, (const char*) &sndwnd, sizeof(sndwnd)) == 0) {
				ga_error("*** set TCP send buffer succeeded.\n");
			} else {
				ga_error("*** set TCP send buffer failed.\n");
			}
			}
		} while(0);
		//
		if(pthread_create(&thread, NULL, rtspserver, &cs) != 0) {
			close(cs);
			ga_error("cannot create service thread.\n");
			continue;
		}
		pthread_detach(thread);
	} while(1);
	//
	return (void *) 0;
}
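One subtle issue in the accept loop above: pthread_create() receives &cs, so a second accept() can overwrite cs before the new thread has copied it. A common fix is to hand each thread its own heap copy; a sketch (spawn_client_thread is hypothetical, and the thread would be responsible for freeing its argument):

#include <stdlib.h>
#include <pthread.h>

static int
spawn_client_thread(int cs, void *(*proc)(void *)) {
	int *arg = (int *) malloc(sizeof(*arg));
	if(arg == NULL)
		return -1;
	*arg = cs;	// private copy: immune to the next accept()
	pthread_t t;
	if(pthread_create(&t, NULL, proc, arg) != 0) {
		free(arg);
		return -1;
	}
	pthread_detach(t);
	return 0;
}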
Example #12
static int
vencoder_init(void *arg) {
	int iid;
	char *pipefmt = (char*) arg;
	struct RTSPConf *rtspconf = rtspconf_global();
	//
	if(rtspconf == NULL) {
		ga_error("video encoder: no configuration found\n");
		return -1;
	}
	if(vencoder_initialized != 0)
		return 0;
	//
	for(iid = 0; iid < video_source_channels(); iid++) {
		char pipename[64];
		int outputW, outputH;
		pipeline *pipe;
		//
		_sps[iid] = _pps[iid] = NULL;
		_spslen[iid] = _ppslen[iid] = 0;
		snprintf(pipename, sizeof(pipename), pipefmt, iid);
		outputW = video_source_out_width(iid);
		outputH = video_source_out_height(iid);
		if((pipe = pipeline::lookup(pipename)) == NULL) {
			ga_error("video encoder: pipe %s is not found\n", pipename);
			goto init_failed;
		}
		ga_error("video encoder: video source #%d from '%s' (%dx%d).\n",
			iid, pipe->name(), outputW, outputH, iid);
		vencoder[iid] = ga_avcodec_vencoder_init(NULL,
				rtspconf->video_encoder_codec,
				outputW, outputH,
				rtspconf->video_fps, rtspconf->vso);
		if(vencoder[iid] == NULL)
			goto init_failed;
#ifdef STANDALONE_SDP
		// encoders for SDP generation
		AVCodecContext *avc = NULL;	// stays NULL for codecs without a meta-encoder
		switch(rtspconf->video_encoder_codec->id) {
		case AV_CODEC_ID_H264:
		case AV_CODEC_ID_H265:
		case AV_CODEC_ID_CAVS:
		case AV_CODEC_ID_MPEG4:
			// need ctx with CODEC_FLAG_GLOBAL_HEADER flag
			avc = avcodec_alloc_context3(rtspconf->video_encoder_codec);
			if(avc == NULL)
				goto init_failed;
			avc->flags |= CODEC_FLAG_GLOBAL_HEADER;
			avc = ga_avcodec_vencoder_init(avc,
				rtspconf->video_encoder_codec,
				outputW, outputH,
				rtspconf->video_fps, rtspconf->vso);
			if(avc == NULL)
				goto init_failed;
			ga_error("video encoder: meta-encoder #%d created.\n", iid);
			break;
		default:
			// do nothing
			break;
		}
		vencoder_sdp[iid] = avc;
#endif
	}
	vencoder_initialized = 1;
	ga_error("video encoder: initialized.\n");
	return 0;
init_failed:
	vencoder_deinit(NULL);
	return -1;
}