Example #1
static void
qos_schedule() {
	struct timeval now, timeout;
	// next report time = previous report time (qos_tv) + QOS_INTERVAL_MS
	timeout.tv_sec = qos_tv.tv_sec;
	timeout.tv_usec = qos_tv.tv_usec + QOS_INTERVAL_MS * 1000;
	timeout.tv_sec += (timeout.tv_usec / 1000000);
	timeout.tv_usec %= 1000000;
	gettimeofday(&now, NULL);
	// fire qos_report() after (timeout - now) microseconds via the live555 task scheduler
	qos_task = env->taskScheduler().scheduleDelayedTask(
			tvdiff_us(&timeout, &now), (TaskFunc*) qos_report, NULL);
	return;
}
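Both scheduling helpers rely on a tvdiff_us() routine that is not shown in these examples. A minimal sketch of what such a helper typically looks like, assuming the usual convention of returning t1 - t2 in microseconds (the real definition lives elsewhere in the code base):

/* Sketch only: the actual tvdiff_us() is defined elsewhere; this assumes the
 * usual "t1 minus t2, in microseconds" convention used by the callers above. */
static long long
tvdiff_us(struct timeval *t1, struct timeval *t2) {
	long long sec = t1->tv_sec - t2->tv_sec;
	long long usec = t1->tv_usec - t2->tv_usec;
	return sec * 1000000LL + usec;
}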
Example #2
long long
ga_usleep(long long interval, struct timeval *ptv) {
	long long delta;
	struct timeval tv;
	if(ptv != NULL) {
		// sleep relative to the reference time *ptv:
		// subtract the time that has already elapsed since *ptv
		gettimeofday(&tv, NULL);
		delta = tvdiff_us(&tv, ptv);
		if(delta >= interval) {
			// the interval has already passed; yield briefly and report it
			usleep(1);
			return -1;
		}
		interval -= delta;
	}
	usleep(interval);
	return 0LL;
}
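For context, a hedged usage sketch of ga_usleep(): the function name and constants below (paced_loop_sketch, the 24-fps rate, the loop bound) are illustrative only, not taken from the code base. Passing the loop's start time as the reference lets each iteration sleep only for the remainder of the frame interval:

#include <sys/time.h>

/* Hypothetical usage sketch: pace a loop at roughly 24 iterations per second. */
static void
paced_loop_sketch(void) {
	const long long frame_interval_us = 1000000LL / 24;
	struct timeval loop_tv;
	int i;
	for(i = 0; i < 100; i++) {
		gettimeofday(&loop_tv, NULL);
		/* ... per-iteration work goes here ... */
		// sleep away whatever remains of the frame interval; ga_usleep()
		// returns -1 if the work already overran the interval
		ga_usleep(frame_interval_us, &loop_tv);
	}
}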
Example #3
static void
qos_report(void *clientData) {
	int i;
	struct timeval now;
	long long elapsed;
	//
	gettimeofday(&now, NULL);
	elapsed = tvdiff_us(&now, &qos_tv);
	for(i = 0; i < n_qrec; i++) {
		RTPReceptionStatsDB::Iterator statsIter(qrec[i].rtpsrc->receptionStatsDB());
		// Assume that there's only one SSRC source (usually the case):
		RTPReceptionStats* stats = statsIter.next(True);
		unsigned pkts_expected, dExp;
		unsigned pkts_received, dRcvd;
		double KB_received, dKB;
		//
		if(stats == NULL)
			continue;
		pkts_expected = stats->totNumPacketsExpected();
		pkts_received = stats->totNumPacketsReceived();
		KB_received = stats->totNumKBytesReceived();
		// deltas since the previous report
		dExp = pkts_expected - qrec[i].pkts_expected;
		dRcvd = pkts_received - qrec[i].pkts_received;
		dKB = KB_received - qrec[i].KB_received;
		// show info (guard dExp == 0 to avoid a division by zero)
		ga_error("%s-report: %.0fKB rcvd; pkt-loss=%u/%u,%.2f%%; bitrate=%.0fKbps; jitter=%u (freq=%uHz)\n",
			qrec[i].prefix, dKB, dExp-dRcvd, dExp,
			dExp > 0 ? 100.0*(dExp-dRcvd)/dExp : 0.0,
			8000000.0*dKB/elapsed,
			stats->jitter(),
			qrec[i].rtpsrc->timestampFrequency());
		// save the current totals as the baseline for the next report
		qrec[i].pkts_expected = pkts_expected;
		qrec[i].pkts_received = pkts_received;
		qrec[i].KB_received = KB_received;
	}
	// schedule next qos
	qos_tv = now;
	qos_schedule();
	return;
}
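qos_report() and qos_schedule() assume some module-level bookkeeping that is not shown in these examples. The sketch below is inferred from the members the code accesses; the names, the array capacity, and the field sizes are guesses, not the actual definitions:

/* Sketch only: inferred from the fields accessed above (qrec[i].prefix, .rtpsrc,
 * .pkts_expected, .pkts_received, .KB_received), not the real declarations. */
#define QOS_MAX_STREAM	8	/* assumed capacity */
typedef struct qos_record_s {
	char prefix[64];		/* label printed in each report line */
	RTPSource *rtpsrc;		/* live555 RTP source being monitored */
	unsigned pkts_expected;		/* totals captured at the previous report */
	unsigned pkts_received;
	double KB_received;
} qos_record_t;
static qos_record_t qrec[QOS_MAX_STREAM];
static int n_qrec = 0;
static struct timeval qos_tv;	/* time of the previous report */
static TaskToken qos_task;	/* handle returned by scheduleDelayedTask() */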
Example #4
/// TODO
static void *
vencoder_threadproc(void *arg) {
	// arg is pointer to source pipename
	int cid;
	pooldata_t *data = NULL;
	vsource_frame_t *frame = NULL;
	char *pipename = (char*) arg;
	pipeline *pipe = pipeline::lookup(pipename);
	struct RTSPConf *rtspconf = NULL;
	//
	long long basePts = -1LL, newpts = 0LL, pts = -1LL, ptsSync = 0LL;
	pthread_mutex_t condMutex = PTHREAD_MUTEX_INITIALIZER;
	pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	//
	int outputW, outputH;
	//
	struct timeval pkttv;
#ifdef PRINT_LATENCY
	struct timeval ptv;
#endif
	//
	int video_written = 0;
	//
	if(pipe == NULL) {
		ga_error("video encoder: invalid pipeline specified (%s).\n", pipename);
		goto video_quit;
	}
	//
	rtspconf = rtspconf_global();
	cid = ((vsource_t*) pipe->get_privdata())->channel;
	outputW = video_source_out_width(cid);
	outputH = video_source_out_height(cid);
	//
	// start encoding
	ga_error("video encoding started: tid=%ld.\n", ga_gettid());
	pipe->client_register(ga_gettid(), &cond);
	//
	while(vencoder_started != 0 && encoder_running() > 0) {
		//
		AVPacket pkt;
		unsigned char *enc;
		int encsize;
		// wait for notification
		data = pipe->load_data();
		if(data == NULL) {
			int err;
			struct timeval tv;
			struct timespec to;
			gettimeofday(&tv, NULL);
			to.tv_sec = tv.tv_sec+1;
			to.tv_nsec = tv.tv_usec * 1000;
			//
			if((err = pipe->timedwait(&cond, &condMutex, &to)) != 0) {
				ga_error("viedo encoder: image source timed out.\n");
				continue;
			}
			data = pipe->load_data();
			if(data == NULL) {
				ga_error("viedo encoder: unexpected NULL frame received (from '%s', data=%d, buf=%d).\n",
					pipe->name(), pipe->data_count(), pipe->buf_count());
				continue;
			}
		}
		frame = (vsource_frame_t*) data->ptr;
		// handle pts
		if(basePts == -1LL) {
			basePts = frame->imgpts;
			ptsSync = encoder_pts_sync(rtspconf->video_fps);
			newpts = ptsSync;
		} else {
			newpts = ptsSync + frame->imgpts - basePts;
		}
		// encode!
		gettimeofday(&pkttv, NULL);
		enc = vpu_encoder_encode(&vpu[cid], frame->imgbuf, vpu[cid].vpu_framesize, &encsize);
		//
		pipe->release_data(data);
		//
		if(enc == NULL) {
			ga_error("encoder-vpu: encode failed.\n");
			goto video_quit;
		}
		// pts must be monotonically increasing
		if(newpts > pts) {
			pts = newpts;
		} else {
			pts++;
		}
		// send packet
#ifdef SAVEFILE
		if(fout != NULL)
			fwrite(enc, sizeof(char), encsize, fout);
#endif
		pkt.data = enc;
		pkt.size = encsize;
		pkt.pts = pts;	// use the monotonically increasing pts computed above
		if(encoder_send_packet_all("video-encoder", cid, &pkt, pkt.pts, &pkttv) < 0) {
			goto video_quit;
		}
		if(video_written == 0) {
			video_written = 1;
			ga_error("first video frame written (pts=%lld)\n", pts);
		}
#ifdef PRINT_LATENCY		/* print out latency */
		gettimeofday(&ptv, NULL);
		ga_aggregated_print(0x0001, 601, tvdiff_us(&ptv, &frame->timestamp));
#endif
	}
	//
video_quit:
	if(pipe) {
		pipe->client_unregister(ga_gettid());
		pipe = NULL;
	}
	//
	ga_error("video encoder: thread terminated (tid=%ld).\n", ga_gettid());
	//
	return NULL;
}
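The one-second wait inside the loop above builds an absolute deadline for pipe->timedwait() from gettimeofday(). A small helper that performs the same timeval-to-timespec conversion for an arbitrary timeout is sketched below; ga_timespec_deadline() is a made-up name for illustration, not part of the code base:

#include <sys/time.h>
#include <time.h>

/* Sketch: compute an absolute deadline timeout_ms from now, in the
 * struct timespec form that pthread_cond_timedwait()-style waits expect. */
static void
ga_timespec_deadline(struct timespec *to, long timeout_ms) {
	struct timeval tv;
	gettimeofday(&tv, NULL);
	to->tv_sec = tv.tv_sec + timeout_ms / 1000;
	to->tv_nsec = tv.tv_usec * 1000L + (timeout_ms % 1000) * 1000000L;
	if(to->tv_nsec >= 1000000000L) {
		to->tv_sec++;
		to->tv_nsec -= 1000000000L;
	}
}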
Example #5
int
ga_wasapi_read(Xcap_wasapi_param *wasapi, unsigned char *wbuf, int wframes) {
	int i, copysize = 0, copyframe = 0;
	HRESULT hr;
	UINT32 packetLength, numFramesAvailable;
	BYTE *pData;
	DWORD flags;
	UINT64 framePos;
	int srcunit = wasapi->bits_per_sample / 8;
	int dstunit = audio_source_bitspersample() / 8;
	struct timeval beforeSleep, afterSleep;
	bool filldata = false;
	// frame statistics 
	struct timeval currtv;
	//
	if(wasapi->firstRead.tv_sec == 0) {
		gettimeofday(&wasapi->firstRead, NULL);
		wasapi->trimmedFrames = (UINT64) (1.0 * wasapi->samplerate *
			tvdiff_us(&wasapi->firstRead, &wasapi->initialTimestamp) /
			1000000);
		wasapi->silenceFrom = wasapi->firstRead;
		ga_error("WASAPI: estimated trimmed frames = %lld\n",
			wasapi->trimmedFrames);
	}
	//
	gettimeofday(&currtv, NULL);
	if(wasapi->lastTv.tv_sec == 0) {
		gettimeofday(&wasapi->lastTv, NULL);
		wasapi->frames = 0;
		wasapi->sframes = 0;
		wasapi->slept = 0;
	} else if(tvdiff_us(&currtv, &wasapi->lastTv) >= 1000000) {
#if 0
		ga_error(
			"Frame statistics: s=%d, ns=%d, sum=%d (sleep=%d)\n",
			wasapi->sframes, wasapi->frames,
			wasapi->sframes + wasapi->frames,
			wasapi->slept);
#endif
		wasapi->lastTv = currtv;
		wasapi->frames = wasapi->sframes = wasapi->slept = 0;
	}
	//
	// emit any silence frames owed from a previous capture gap before reading new data
	if(wasapi->fillSilence > 0) {
		if(wasapi->fillSilence <= wframes) {
			copyframe = (int) wasapi->fillSilence;
		} else {
			copyframe = wframes;
		}
		copysize = copyframe * wasapi->channels * dstunit;
		ZeroMemory(wbuf, copysize);
		//
		wasapi->fillSilence -= copyframe;
		wframes -= copyframe;
		wasapi->sframes += copyframe;
		if(wframes <= 0) {
			return copyframe;
		}
	}
	//
	hr = wasapi->pCaptureClient->GetNextPacketSize(&packetLength);
	EXIT_ON_ERROR(hr, "WASAPI: Get packet size failed.\n");
	//
	if(packetLength == 0) {
		// no capture packet ready: wait one buffer-fill interval and re-check;
		// a still-empty queue is later accounted as silence to be filled
		Sleep(wasapi->bufferFillInt);
		gettimeofday(&afterSleep, NULL);
		//
		wasapi->slept++;
		hr = wasapi->pCaptureClient->GetNextPacketSize(&packetLength);
		EXIT_ON_ERROR(hr, "WASAPI: Get packet size failed.\n");
		if(packetLength == 0) {
			// fill silence
			double silenceFrame = 1.0 *
				tvdiff_us(&afterSleep, &wasapi->silenceFrom) *
				wasapi->samplerate / 1000000.0;
			wasapi->fillSilence += (UINT64) silenceFrame;
			wasapi->silenceFrom = afterSleep;
		}
	}
	//
	while(packetLength != 0 && wframes >= (int) packetLength) {
		hr = wasapi->pCaptureClient->GetBuffer(&pData,
			&numFramesAvailable, &flags, &framePos, NULL);
		EXIT_ON_ERROR(hr, "WASAPI: Get buffer failed.\n");
		
		if(packetLength != numFramesAvailable) {
			ga_error("WARNING: packetLength(%d) != numFramesAvailable(%d)\n",
				packetLength, numFramesAvailable);
		}

		if(flags & AUDCLNT_BUFFERFLAGS_SILENT) {
			wasapi->sframes += numFramesAvailable;
			ZeroMemory(&wbuf[copysize], numFramesAvailable * wasapi->channels * dstunit);
			//ga_error("WASAPI-DEBUG: write slience (%d).\n", numFramesAvailable);
		} else {
			wasapi->frames += numFramesAvailable;
			if(wasapi->isFloat) {
				// convert 32-bit float samples to signed 16-bit PCM
				// (note: a full-scale sample of +1.0 wraps to -32768 here)
				float *r = (float*) (pData);
				short *w = (short*) (&wbuf[copysize]);
				int cc = numFramesAvailable * wasapi->channels;
				for(i = 0; i < cc; i++) {
					*w = (short) (*r * 32768.0);
					r++;
					w++;
				}
			} else {
				CopyMemory(&wbuf[copysize], pData, numFramesAvailable * wasapi->channels * dstunit);
			}
			//ga_error("WASAPI-DEBUG: write data (%d).\n", numFramesAvailable);
		}

		wframes -= numFramesAvailable;
		copyframe += numFramesAvailable;
		copysize += numFramesAvailable * wasapi->channels * dstunit;

		hr = wasapi->pCaptureClient->ReleaseBuffer(numFramesAvailable);
		EXIT_ON_ERROR(hr, "WASAPI: Release buffer failed.\n");

		hr = wasapi->pCaptureClient->GetNextPacketSize(&packetLength);
		EXIT_ON_ERROR(hr, "WASAPI: Get packet size failed.\n");

		filldata = true;
	}
	//
	if(filldata) {
		gettimeofday(&wasapi->silenceFrom, NULL);
	}
	//
	return copyframe;
quit:	// reached via EXIT_ON_ERROR() on any WASAPI failure
	return -1;
}
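The float path in the capture loop above maps a full-scale sample of +1.0 to 32768, which wraps to -32768 when cast to short. A clamped conversion is sketched below; this is an alternative, not how the module currently converts, and float_to_s16_clamped() is an illustrative name:

/* Sketch: convert interleaved 32-bit float samples to signed 16-bit PCM,
 * clamping instead of letting full-scale samples wrap around. */
static void
float_to_s16_clamped(const float *src, short *dst, int count) {
	int i;
	for(i = 0; i < count; i++) {
		float s = src[i] * 32768.0f;
		if(s > 32767.0f)
			s = 32767.0f;
		else if(s < -32768.0f)
			s = -32768.0f;
		dst[i] = (short) s;
	}
}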