static void test_video_processing (void) {
	MSVideoSize src_size = { MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H };
	MSVideoSize src_dest = { MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H };
	mblk_t * yuv_block2;
	YuvBuf yuv;
	int y_bytes_per_row = src_size.width + src_size.width%32 ;
	uint8_t* y = (uint8_t*)ms_malloc(y_bytes_per_row*src_size.height); /*to allow the block to work with a multiple of 32*/
	int crcb_bytes_per_row = src_size.width/2 + (src_size.width/2)%32 ;
	uint8_t* cbcr = (uint8_t*)ms_malloc(crcb_bytes_per_row*src_size.height);
	int i,j;

	for (i=0;i<src_size.height*src_size.width;i++) {
		y[i]=i%256;
	}
	for (i=0;i<src_size.height*src_size.width/2;i++) {
		cbcr[i]=i%256;
	}

	yuv_block2 = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(	y
																					,cbcr
																					,0
																					, src_size.width
																					, src_size.height
																					, y_bytes_per_row
																					, crcb_bytes_per_row
																					, 1
																					, 0);

	BC_ASSERT_FALSE(ms_yuv_buf_init_from_mblk(&yuv, yuv_block2));

	BC_ASSERT_EQUAL(src_dest.width,yuv.w, int, "%d");
	BC_ASSERT_EQUAL(src_dest.height,yuv.h, int, "%d");

	/*check y*/
	for (i=0;i<yuv.h;i++) {
		for (j=0;j<yuv.w;j++)
		if (yuv.planes[0][i*yuv.strides[0]+j] != y[i*y_bytes_per_row+j]) {
			ms_error("Wrong value  [%i] at ofset [%i], should be [%i]",yuv.planes[0][i*yuv.strides[0]+j],i*yuv.strides[0]+j,y[i*y_bytes_per_row+j]);
			BC_FAIL("bad y value");
			break;
		}
	}

	/*check cb*/
	for (i=0;i<yuv.h/2;i++) {
		for (j=0;j<yuv.w/2;j++)
		if (yuv.planes[1][i*yuv.strides[1]+j] != cbcr[i*crcb_bytes_per_row+2*j]) {
			ms_error("Wrong value  [%i] at ofset [%i], should be [%i]",yuv.planes[1][i*yuv.strides[1]+j],i*yuv.strides[1]+j,y[i*crcb_bytes_per_row+2*j]);
			BC_FAIL("bad cb value");
			break;
		}
	}

	/*check cr*/
	for (i=0;i<yuv.h/2;i++) {
		for (j=0;j<yuv.w/2;j++)
		if (yuv.planes[2][i*yuv.strides[2]+j] != cbcr[i*crcb_bytes_per_row+2*j+1]) {
			ms_error("Wrong value  [%i] at ofset [%i], should be [%i]",yuv.planes[2][i*yuv.strides[2]+j],i*yuv.strides[2]+j,y[i*crcb_bytes_per_row+2*j+1]);
			BC_FAIL("bad cr value");
			break;
		}
	}

	ms_free(y);
	ms_free(cbcr);

}
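/* Side note (not part of the original test): rounding a stride up to the next
 * multiple of 32 is usually written as in the hypothetical helper below. For the
 * VGA dimensions used above (640 and 320 are already 32-aligned), the
 * "width + width%32" expressions happen to yield the same values. */
static int round_up_to_multiple_of_32(int n) {
	return (n + 31) & ~31; /* next multiple of 32, for non-negative n */
}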
Example #2
LpConfig *lp_config_new_with_factory(const char *config_filename, const char *factory_config_filename) {

	int fd;
	bctbx_vfs_file_t* pFile = NULL;

	LpConfig *lpconfig=lp_new0(LpConfig,1);
	lpconfig->g_bctbx_vfs = bctbx_vfs_get_default();
	
	lpconfig->refcnt=1;
	if (config_filename!=NULL){
		if(ortp_file_exist(config_filename) == 0) {
			lpconfig->filename=lp_realpath(config_filename, NULL);
			if(lpconfig->filename == NULL) {
				ms_error("Could not find the real path of %s: %s", config_filename, strerror(errno));
				goto fail;
			}
		} else {
			lpconfig->filename = ms_strdup(config_filename);
		}
		lpconfig->tmpfilename=ortp_strdup_printf("%s.tmp",lpconfig->filename);
		ms_message("Using (r/w) config information from %s", lpconfig->filename);

#if !defined(_WIN32)
		{
			struct stat fileStat;
			if ((stat(lpconfig->filename,&fileStat) == 0) && (S_ISREG(fileStat.st_mode))) {
				/* make existing configuration files non-group/world-accessible */
				if (chmod(lpconfig->filename, S_IRUSR | S_IWUSR) == -1) {
					ms_warning("unable to correct permissions on "
						"configuration file: %s", strerror(errno));
				}
			}
		}
#endif /*_WIN32*/
		/*open with r+ to check if we can write on it later*/

		pFile = bctbx_file_open(lpconfig->g_bctbx_vfs,lpconfig->filename, "r+");
		fd = pFile ? pFile->fd : -1;
		lpconfig->pFile = pFile;
		
#ifdef RENAME_REQUIRES_NONEXISTENT_NEW_PATH
		if (fd  == -1){
			pFile = bctbx_file_open(lpconfig->g_bctbx_vfs,lpconfig->tmpfilename, "r+");
			fd = pFile ? pFile->fd : -1;
			if (fd != -1){
				lpconfig->pFile = pFile;
				ms_warning("Could not open %s but %s works, app may have crashed during last sync.",lpconfig->filename,lpconfig->tmpfilename);
			}
		}
#endif
		if (fd != -1){
			lp_config_parse(lpconfig, pFile);
			bctbx_file_close(pFile);
			lpconfig->pFile = NULL;
			lpconfig->modified=0;
		}
	}
	if (factory_config_filename != NULL) {
		lp_config_read_file(lpconfig, factory_config_filename);
	}
	return lpconfig;

fail:
	ms_free(lpconfig);
	return NULL;
}
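/* A minimal usage sketch for the constructor above, assuming the usual LpConfig
 * accessors lp_config_get_string() and lp_config_destroy(); the file paths,
 * section and key names are purely illustrative. */
static void example_read_config(void) {
	LpConfig *cfg = lp_config_new_with_factory("linphonerc", "linphonerc-factory"); /* illustrative paths */
	if (cfg != NULL) {
		/* read a value, falling back to a default when the key is absent */
		const char *contact = lp_config_get_string(cfg, "sip", "contact", "sip:anonymous@example.org");
		ms_message("sip/contact = %s", contact);
		lp_config_destroy(cfg);
	}
}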
Example #3
belle_sip_request_t* sal_op_build_request(SalOp *op,const char* method) {
	belle_sip_header_from_t* from_header;
	belle_sip_header_to_t* to_header;
	belle_sip_provider_t* prov=op->base.root->prov;
	belle_sip_request_t *req;
	belle_sip_uri_t* req_uri;
	belle_sip_uri_t* to_uri;

	const SalAddress* to_address;
	const MSList *elem=sal_op_get_route_addresses(op);
	char token[10];

	/* check that the op has a correct to address */
	to_address = sal_op_get_to_address(op);
	if( to_address == NULL ){
		ms_error("No To: address, cannot build request");
		return NULL;
	}
	
	to_uri = belle_sip_header_address_get_uri(BELLE_SIP_HEADER_ADDRESS(to_address));
	if( to_uri == NULL ){
		ms_error("To: address is invalid, cannot build request");
		return NULL;
	}

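	/* For REGISTER, or when no privacy is requested, use the real From identity;
	 * otherwise build an anonymous From (RFC 3323 style) with a random tag. */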
	if (strcmp("REGISTER",method)==0 || op->privacy==SalPrivacyNone) {
		from_header = belle_sip_header_from_create(BELLE_SIP_HEADER_ADDRESS(sal_op_get_from_address(op))
						,belle_sip_random_token(token,sizeof(token)));
	} else {
		from_header=belle_sip_header_from_create2("Anonymous <sip:anonymous@anonymous.invalid>",belle_sip_random_token(token,sizeof(token)));
	}
	/*make sure to preserve components like headers or port*/

	req_uri = (belle_sip_uri_t*)belle_sip_object_clone((belle_sip_object_t*)to_uri);
	belle_sip_uri_set_secure(req_uri,sal_op_is_secure(op));

	to_header = belle_sip_header_to_create(BELLE_SIP_HEADER_ADDRESS(to_address),NULL);

	req=belle_sip_request_create(
					req_uri,
					method,
					belle_sip_provider_create_call_id(prov),
					belle_sip_header_cseq_create(20,method),
					from_header,
					to_header,
					belle_sip_header_via_new(),
					70);

	if (op->privacy & SalPrivacyId) {
		belle_sip_header_p_preferred_identity_t* p_preferred_identity=belle_sip_header_p_preferred_identity_create(BELLE_SIP_HEADER_ADDRESS(sal_op_get_from_address(op)));
		belle_sip_message_add_header(BELLE_SIP_MESSAGE(req),BELLE_SIP_HEADER(p_preferred_identity));
	}

	if (elem && strcmp(method,"REGISTER")!=0 && !op->base.root->no_initial_route){
		add_initial_route_set(req,elem);
	}

	if (strcmp("REGISTER",method)!=0 && op->privacy!=SalPrivacyNone ){
		belle_sip_header_privacy_t* privacy_header=belle_sip_header_privacy_new();
		if (op->privacy&SalPrivacyCritical)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacyCritical));
		if (op->privacy&SalPrivacyHeader)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacyHeader));
		if (op->privacy&SalPrivacyId)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacyId));
		if (op->privacy&SalPrivacyNone)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacyNone));
		if (op->privacy&SalPrivacySession)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacySession));
		if (op->privacy&SalPrivacyUser)
			belle_sip_header_privacy_add_privacy(privacy_header,sal_privacy_to_string(SalPrivacyUser));
		belle_sip_message_add_header(BELLE_SIP_MESSAGE(req),BELLE_SIP_HEADER(privacy_header));
	}
	belle_sip_message_add_header(BELLE_SIP_MESSAGE(req),op->base.root->supported);
	return req;
}
Example #4
static int ca_open_r(CAData *d){
	OSStatus result;
	UInt32 param;
	AudioDeviceID fInputDeviceID;
	
	ComponentDescription desc;  
	Component comp;
	
	// Get Default Input audio unit
	desc.componentType = kAudioUnitType_Output;
	desc.componentSubType = kAudioUnitSubType_HALOutput;
	desc.componentManufacturer = kAudioUnitManufacturer_Apple;
	desc.componentFlags = 0;
	desc.componentFlagsMask = 0;
	
	comp = FindNextComponent(NULL, &desc);
	if (comp == NULL)
	{
		ms_message("Cannot find audio component");
		return -1;
	}
	
	result = OpenAComponent(comp, &d->caInAudioUnit);
	if(result != noErr)
	{
		ms_message("Cannot open audio component %x", result);
		return -1;
	}
	
	param = 1;
	result = AudioUnitSetProperty(d->caInAudioUnit,
								  kAudioOutputUnitProperty_EnableIO,
								  kAudioUnitScope_Input,
								  1,
								  &param,
								  sizeof(UInt32));
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	param = 0;
	result = AudioUnitSetProperty(d->caInAudioUnit,
								  kAudioOutputUnitProperty_EnableIO,
								  kAudioUnitScope_Output,
								  0,
								  &param,
								  sizeof(UInt32));
	
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	// Set the current device to the default input unit.
	result = AudioUnitSetProperty(d->caInAudioUnit,
								  kAudioOutputUnitProperty_CurrentDevice,
								  kAudioUnitScope_Global,
								  0,
								  &d->dev,
								  sizeof(AudioDeviceID));
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	UInt32 asbdsize = sizeof(AudioStreamBasicDescription);
	memset((char *)&d->caInASBD, 0, asbdsize);
	
	result = AudioUnitGetProperty (d->caInAudioUnit,
								   kAudioUnitProperty_StreamFormat,
								   kAudioUnitScope_Input,
								   1,
								   &d->caInASBD,
								   &asbdsize);
	
	ms_message("AudioUnitGetProperty %i %x", result, result);
	
	
	if (d->caInASBD.mChannelsPerFrame>1)
	{
		d->caInASBD.mBytesPerFrame = d->caInASBD.mBytesPerFrame / d->caInASBD.mChannelsPerFrame;
		d->caInASBD.mBytesPerPacket = d->caInASBD.mBytesPerPacket / d->caInASBD.mChannelsPerFrame;		
		d->caInASBD.mChannelsPerFrame = 1;
	}
	
	result = AudioUnitSetProperty(d->caInAudioUnit,
								  kAudioUnitProperty_StreamFormat,
								  kAudioUnitScope_Output,
								  1,
								  &d->caInASBD,
								  sizeof(AudioStreamBasicDescription));
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	
	d->caSourceBuffer=NULL;
	
	// Get the number of frames in the IO buffer(s)
	param = sizeof(UInt32);
	UInt32 fAudioSamples;
	result = AudioUnitGetProperty(d->caInAudioUnit,
								  kAudioDevicePropertyBufferFrameSize,
								  kAudioUnitScope_Input,
								  1,
								  &fAudioSamples,
								  &param);
	if(result != noErr)
	{
		ms_error("failed to get audio sample size");
		return -1;
	}
	
	result = AudioUnitInitialize(d->caInAudioUnit);
	if(result != noErr)
	{
		ms_error("failed to AudioUnitInitialize input %i", result);
		return -1;
	}
	
	// Allocate our low device audio buffers
	d->fAudioBuffer = AllocateAudioBufferList(d->caInASBD.mChannelsPerFrame,
											  fAudioSamples * d->caInASBD.mBytesPerFrame * 2);
	if(d->fAudioBuffer == NULL)
	{
		ms_error("failed to allocate buffers fAudioBuffer");
		return -1;
	}
	// Allocate our low device audio buffers
	d->fMSBuffer = AllocateAudioBufferList( d->stereo ? 2 : 1,
										   fAudioSamples * ((d->bits / 8)*(d->stereo ? 2 : 1)) *2);
	if(d->fMSBuffer == NULL)
	{
		ms_error("failed to allocate buffers fMSBuffer");
		return -1;
	}
	
	return 0;
}
static mblk_t *jpeg2yuv(uint8_t *jpgbuf, int bufsize, MSVideoSize *reqsize){
#ifndef NO_FFMPEG
	AVCodecContext av_context;
	int got_picture=0;
	mblk_t *ret;
	struct SwsContext *sws_ctx;
	AVPacket pkt;
	MSPicture dest;
	AVCodec *codec=avcodec_find_decoder(CODEC_ID_MJPEG);
	AVFrame* orig = av_frame_alloc();

	if (codec==NULL){
		ms_error("Could not find MJPEG decoder in ffmpeg.");
		return NULL;
	}

	avcodec_get_context_defaults3(&av_context,NULL);
	if (avcodec_open2(&av_context,codec,NULL)<0){
		ms_error("jpeg2yuv: avcodec_open failed");
		return NULL;
	}
	av_init_packet(&pkt);
	pkt.data=jpgbuf;
	pkt.size=bufsize;

	if (avcodec_decode_video2(&av_context,orig,&got_picture,&pkt) < 0) {
		ms_error("jpeg2yuv: avcodec_decode_video failed");
		avcodec_close(&av_context);
		return NULL;
	}
	ret=ms_yuv_buf_alloc(&dest, reqsize->width,reqsize->height);
	/* not using SWS_FAST_BILINEAR because it doesn't play well with
	 * av_context.pix_fmt set to PIX_FMT_YUVJ420P by jpeg decoder */
	sws_ctx=sws_getContext(av_context.width,av_context.height,av_context.pix_fmt,
		reqsize->width,reqsize->height,PIX_FMT_YUV420P,SWS_BILINEAR,
		NULL, NULL, NULL);
	if (sws_ctx==NULL) {
		ms_error("jpeg2yuv: ms_sws_getContext() failed.");
		avcodec_close(&av_context);
		freemsg(ret);
		return NULL;
	}

#if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0)
	if (sws_scale(sws_ctx,(const uint8_t* const *)orig->data,orig->linesize,0,av_context.height,dest.planes,dest.strides)<0){
#else
	if (sws_scale(sws_ctx,(uint8_t**)orig->data,orig->linesize,0,av_context.height,dest.planes,dest.strides)<0){
#endif
		ms_error("jpeg2yuv: ms_sws_scale() failed.");
		sws_freeContext(sws_ctx);
		avcodec_close(&av_context);
		freemsg(ret);
		return NULL;
	}
	sws_freeContext(sws_ctx);
	av_frame_free(&orig);
	avcodec_close(&av_context);
	return ret;
#elif TARGET_OS_IPHONE
	MSPicture dest;
	CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, jpgbuf, bufsize, NULL);
	// use the data provider to get a CGImage; release the data provider
	CGImageRef image = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, FALSE,
						kCGRenderingIntentDefault);
	CGDataProviderRelease(dataProvider);
	reqsize->width = CGImageGetWidth(image);
	reqsize->height = CGImageGetHeight(image);

	uint8_t* tmp = (uint8_t*) malloc(reqsize->width * reqsize->height * 4);
	mblk_t* ret=ms_yuv_buf_alloc(&dest, reqsize->width, reqsize->height);
	CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
	CGContextRef imageContext =
	CGBitmapContextCreate(tmp, reqsize->width, reqsize->height, 8, reqsize->width*4, colourSpace, kCGImageAlphaNoneSkipLast);
	CGColorSpaceRelease(colourSpace);
	// draw the image to the context, release it
	CGContextDrawImage(imageContext, CGRectMake(0, 0, reqsize->width, reqsize->height), image);
	CGImageRelease(image);

	/* convert tmp/RGB -> ret/YUV */
	for(int y=0; y<reqsize->height; y++) {
		for(int x=0; x<reqsize->width; x++) {
			uint8_t r = tmp[y * reqsize->width * 4 + x * 4 + 0];
			uint8_t g = tmp[y * reqsize->width * 4 + x * 4 + 1];
			uint8_t b = tmp[y * reqsize->width * 4 + x * 4 + 2];

			// Y
			*dest.planes[0]++ = (uint8_t)((0.257 * r) + (0.504 * g) + (0.098 * b) + 16);

			// U/V subsampling
			if ((y % 2==0) && (x%2==0)) {
				uint32_t r32=0, g32=0, b32=0;
				for(int i=0; i<2; i++) {
					for(int j=0; j<2; j++) {
						r32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 0];
						g32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 1];
						b32 += tmp[(y+i) * reqsize->width * 4 + (x+j) * 4 + 2];
					}
				}
				r32 = (uint32_t)(r32 * 0.25f); g32 = (uint32_t)(g32 * 0.25f); b32 = (uint32_t) (b32 * 0.25f);

				// U
				*dest.planes[1]++ = (uint8_t)(-(0.148 * r32) - (0.291 * g32) + (0.439 * b32) + 128);
				// V
				*dest.planes[2]++ = (uint8_t)((0.439 * r32) - (0.368 * g32) - (0.071 * b32) + 128);
			}
		}
	}
	free(tmp);
	return ret;
#else
	return NULL;
#endif
}




mblk_t *ms_load_jpeg_as_yuv(const char *jpgpath, MSVideoSize *reqsize){
#if defined(WIN32)
	mblk_t *m=NULL;
	DWORD st_sizel;
	DWORD st_sizeh;
	uint8_t *jpgbuf;
	DWORD err;
	HANDLE fd;

#ifdef UNICODE
	WCHAR wUnicode[1024];
	MultiByteToWideChar(CP_UTF8, 0, jpgpath, -1, wUnicode, 1024);
	fd = CreateFile(wUnicode, GENERIC_READ, FILE_SHARE_READ, NULL,
        OPEN_EXISTING, 0, NULL);
#else
	fd = CreateFile(jpgpath, GENERIC_READ, FILE_SHARE_READ, NULL,
        OPEN_EXISTING, 0, NULL);
#endif
	if (fd==INVALID_HANDLE_VALUE){
		ms_error("Failed to open %s",jpgpath);
		return NULL;
	}
	st_sizel=0;
	st_sizeh=0;
	st_sizel = GetFileSize(fd, &st_sizeh);
	if (st_sizeh>0 || st_sizel<=0)
	{
		CloseHandle(fd);
		ms_error("Can't load file %s",jpgpath);
		return NULL;
	}
	jpgbuf=(uint8_t*)ms_malloc0(st_sizel);
	if (jpgbuf==NULL)
	{
		CloseHandle(fd);
		ms_error("Cannot allocate buffer for %s",jpgpath);
		return NULL;
	}
	err=0;
	ReadFile(fd, jpgbuf, st_sizel, &err, NULL) ;

	if (err!=st_sizel){
		  ms_error("Could not read as much as wanted !");
	}
	m=jpeg2yuv(jpgbuf,st_sizel,reqsize);
	ms_free(jpgbuf);
	if (m==NULL)
	{
		CloseHandle(fd);
		ms_error("Cannot load image from buffer for %s",jpgpath);
		return NULL;
	}
	CloseHandle(fd);
	return m;
#else
	mblk_t *m=NULL;
	struct stat statbuf;
	uint8_t *jpgbuf;
	int err;
	int fd=open(jpgpath,O_RDONLY);

	if (fd!=-1){
		fstat(fd,&statbuf);
		if (statbuf.st_size<=0)
		{
			close(fd);
			ms_error("Cannot load %s",jpgpath);
			return NULL;
		}
		jpgbuf=(uint8_t*)ms_malloc0(statbuf.st_size + FF_INPUT_BUFFER_PADDING_SIZE);
		if (jpgbuf==NULL)
		{
			close(fd);
			ms_error("Cannot allocate buffer for %s",jpgpath);
			return NULL;
		}
		err=read(fd,jpgbuf,statbuf.st_size);
		if (err!=statbuf.st_size){
			ms_error("Could not read as much as wanted: %i<>%li !",err,(long)statbuf.st_size);
		}
		m=jpeg2yuv(jpgbuf,statbuf.st_size,reqsize);
		ms_free(jpgbuf);
		if (m==NULL)
		{
			close(fd);
			ms_error("Cannot load image from buffer for %s",jpgpath);
			return NULL;
		}
	}else{
		ms_error("Cannot load %s",jpgpath);
		return NULL;
	}
	close(fd);
	return m;
#endif
}
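/* A minimal usage sketch for ms_load_jpeg_as_yuv() above; the file name is
 * illustrative, and freemsg() (from oRTP) is assumed to be the matching way to
 * release the returned mblk_t. */
static void example_load_jpeg(void) {
	MSVideoSize size = { MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H };
	mblk_t *yuv = ms_load_jpeg_as_yuv("nowebcam.jpg", &size); /* illustrative file name */
	if (yuv != NULL) {
		ms_message("Loaded JPEG as YUV %ix%i", size.width, size.height);
		freemsg(yuv);
	}
}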
static int video_capture_set_vsize(MSFilter *f, void* data){
	AndroidReaderContext* d = (AndroidReaderContext*) f->data;
	ms_mutex_lock(&d->mutex);

	d->requestedSize=*(MSVideoSize*)data;

	// always request landscape mode, orientation is handled later
	if (d->requestedSize.height > d->requestedSize.width) {
		int tmp = d->requestedSize.height;
		d->requestedSize.height = d->requestedSize.width;
		d->requestedSize.width = tmp;
	}

	JNIEnv *env = ms_get_jni_env();

	jmethodID method = env->GetStaticMethodID(d->helperClass,"selectNearestResolutionAvailable", "(III)[I");

	// find neareast hw-available resolution (using jni call);
	jobject resArray = env->CallStaticObjectMethod(d->helperClass, method, ((AndroidWebcamConfig*)d->webcam->data)->id, d->requestedSize.width, d->requestedSize.height);

	if (!resArray) {
		ms_mutex_unlock(&d->mutex);
		ms_error("Failed to retrieve camera '%d' supported resolutions\n", ((AndroidWebcamConfig*)d->webcam->data)->id);
		return -1;
	}

	// handle result :
	//   - 0 : width
	//   - 1 : height
	//   - 2 : useDownscaling
	jint res[3];
	env->GetIntArrayRegion((jintArray)resArray, 0, 3, res);
	ms_message("Camera selected resolution is: %dx%d (requested: %dx%d) with downscaling?%d\n", res[0], res[1], d->requestedSize.width, d->requestedSize.height, res[2]);
	d->hwCapableSize.width =  res[0];
	d->hwCapableSize.height = res[1];
	d->useDownscaling = res[2];

	int rqSize = d->requestedSize.width * d->requestedSize.height;
	int hwSize = d->hwCapableSize.width * d->hwCapableSize.height;
	double downscale = d->useDownscaling ? 0.5 : 1;

	// if hw supplies a smaller resolution, modify requested size accordingly
	if ((hwSize * downscale * downscale) < rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will supply smaller one: %dx%d\n",
			d->requestedSize.width, d->requestedSize.height, (int) (res[0] * downscale), (int) (res[1]*downscale));
		d->requestedSize.width = (int) (d->hwCapableSize.width * downscale);
		d->requestedSize.height = (int) (d->hwCapableSize.height * downscale);
	} else if ((hwSize * downscale * downscale) > rqSize) {
		ms_message("Camera cannot produce requested resolution %dx%d, will capture a bigger one (%dx%d) and crop it to match encoder requested resolution\n",
			d->requestedSize.width, d->requestedSize.height, (int)(res[0] * downscale), (int)(res[1] * downscale));
	}
	
	// is phone held |_ to cam orientation ?
	if (d->rotation == UNDEFINED_ROTATION || compute_image_rotation_correction(d, d->rotation) % 180 != 0) {
		if (d->rotation == UNDEFINED_ROTATION) {
			ms_error("To produce a correct image, Mediastreamer MUST be aware of device's orientation BEFORE calling 'configure_video_source'\n"); 
			ms_warning("Capture filter do not know yet about device's orientation.\n"
				"Current assumption: device is held perpendicular to its webcam (ie: portrait mode for a phone)\n");
			d->rotationSavedDuringVSize = 0;
		} else {
			d->rotationSavedDuringVSize = d->rotation;
		}
		bool camIsLandscape = d->hwCapableSize.width > d->hwCapableSize.height;
		bool reqIsLandscape = d->requestedSize.width > d->requestedSize.height;

		// if both are landscape or both portrait, swap
		if (camIsLandscape == reqIsLandscape) {
			int t = d->requestedSize.width;
			d->requestedSize.width = d->requestedSize.height;
			d->requestedSize.height = t;
			ms_message("Swapped resolution width and height to : %dx%d\n", d->requestedSize.width, d->requestedSize.height);
		}
	} else {
		d->rotationSavedDuringVSize = d->rotation;
	}

	ms_mutex_unlock(&d->mutex);
	return 0;
}
Example #7
static void cacard_detect(MSSndCardManager * m)
{
#ifndef TARGET_OS_IPHONE
	OSStatus err;
	UInt32 slen;
	int count;
	Boolean writable;
	int i;
	writable = 0;
	slen = 0;
	err =
	AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &slen,
								 &writable);
	if (err != kAudioHardwareNoError) {
		ms_error("get kAudioHardwarePropertyDevices error %ld", err);
		return;
	}
	AudioDeviceID V[slen / sizeof(AudioDeviceID)];
	err =
	AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &slen, V);
	if (err != kAudioHardwareNoError) {
		ms_error("get kAudioHardwarePropertyDevices error %ld", err);
		return;
	}
	count = slen / sizeof(AudioDeviceID);
	for (i = 0; i < count; i++) {
		char devname_in[256];
		char uidname_in[256];
		char devname_out[256];
		char uidname_out[256];
		int cap = 0;
		
		/* OUTPUT CARDS */
		slen = 256;
		err =
		AudioDeviceGetProperty(V[i], 0, FALSE,
							   kAudioDevicePropertyDeviceName, &slen,
							   devname_out);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			continue;
		}
		slen = strlen(devname_out);
		/* trim whitespace */
		while ((slen > 0) && (devname_out[slen - 1] == ' ')) {
			slen--;
		}
		devname_out[slen] = '\0';
		
		err =
		AudioDeviceGetPropertyInfo(V[i], 0, FALSE,
								   kAudioDevicePropertyStreamConfiguration,
								   &slen, &writable);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			continue;
		}
		
		AudioBufferList *buflist = ms_malloc(slen);
		if (buflist == NULL) {
			ms_error("alloc AudioBufferList %ld", err);
			continue;
		}
		
		err =
		AudioDeviceGetProperty(V[i], 0, FALSE,
							   kAudioDevicePropertyStreamConfiguration,
							   &slen, buflist);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			ms_free(buflist);
			continue;
		}
		
		UInt32 j;
		for (j = 0; j < buflist->mNumberBuffers; j++) {
			if (buflist->mBuffers[j].mNumberChannels > 0) {
				cap = MS_SND_CARD_CAP_PLAYBACK;
				break;
			}
		}
		
		ms_free(buflist);
		
		/* INPUT CARDS */
		slen = 256;
		err =
		AudioDeviceGetProperty(V[i], 0, TRUE,
							   kAudioDevicePropertyDeviceName, &slen,
							   devname_in);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			continue;
		}
		slen = strlen(devname_in);
		/* trim whitespace */
		while ((slen > 0) && (devname_in[slen - 1] == ' ')) {
			slen--;
		}
		devname_in[slen] = '\0';
		
		err =
		AudioDeviceGetPropertyInfo(V[i], 0, TRUE,
								   kAudioDevicePropertyStreamConfiguration,
								   &slen, &writable);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			continue;
		}
		
		
		buflist = ms_malloc(slen);
		if (buflist == NULL) {
			ms_error("alloc error %ld", err);
			continue;
		}
		
		err =
		AudioDeviceGetProperty(V[i], 0, TRUE,
							   kAudioDevicePropertyStreamConfiguration,
							   &slen, buflist);
		if (err != kAudioHardwareNoError) {
			ms_error("get kAudioDevicePropertyDeviceName error %ld", err);
			ms_free(buflist);
			continue;
		}
		
		for (j = 0; j < buflist->mNumberBuffers; j++) {
			if (buflist->mBuffers[j].mNumberChannels > 0) {
				cap |= MS_SND_CARD_CAP_CAPTURE;
				break;
			}
		}
		
		ms_free(buflist);
		
		if (cap & MS_SND_CARD_CAP_PLAYBACK) {
			CFStringRef dUID_out;
			dUID_out = NULL;
			slen = sizeof(CFStringRef);
			err =
		    AudioDeviceGetProperty(V[i], 0, false,
								   kAudioDevicePropertyDeviceUID, &slen,
								   &dUID_out);
			if (err != kAudioHardwareNoError) {
				ms_error("get kAudioHardwarePropertyDevices error %ld", err);
				continue;
			}
			CFStringGetCString(dUID_out, uidname_out, 256,
							   CFStringGetSystemEncoding());
			ms_message("CA: devname_out:%s uidname_out:%s", devname_out, uidname_out);
			
			AudioStreamBasicDescription devicewriteFormat;
			slen = sizeof(devicewriteFormat);
			err = AudioDeviceGetProperty(V[i], 0, false,
										 kAudioDevicePropertyStreamFormat,
										 &slen, &devicewriteFormat);
			if (err == kAudioHardwareNoError) {
				show_format("output device", &devicewriteFormat);
			}
			MSSndCard *card = ca_card_new(devname_out, dUID_out, V[i], MS_SND_CARD_CAP_PLAYBACK);
			ms_snd_card_manager_add_card(m, card);
		}
		
		if (cap & MS_SND_CARD_CAP_CAPTURE) {
			CFStringRef dUID_in;
			dUID_in = NULL;
			slen = sizeof(CFStringRef);
			err =
		    AudioDeviceGetProperty(V[i], 0, true,
								   kAudioDevicePropertyDeviceUID, &slen,
								   &dUID_in);
			if (err != kAudioHardwareNoError) {
				ms_error("get kAudioHardwarePropertyDevices error %ld", err);
				continue;
			}
			CFStringGetCString(dUID_in, uidname_in, 256,
							   CFStringGetSystemEncoding());
			ms_message("CA: devname_in:%s uidname_in:%s", devname_in, uidname_in);
			
			AudioStreamBasicDescription devicereadFormat;
			slen = sizeof(devicereadFormat);
			err = AudioDeviceGetProperty(V[i], 0, true,
										 kAudioDevicePropertyStreamFormat,
										 &slen, &devicereadFormat);
			if (err == kAudioHardwareNoError) {
				show_format("input device", &devicereadFormat);
			}
			MSSndCard *card = ca_card_new(devname_in, dUID_in, V[i], MS_SND_CARD_CAP_CAPTURE);
			ms_snd_card_manager_add_card(m, card);
		}
	}
#else
	AudioStreamBasicDescription deviceFormat;
	memset(&deviceFormat, 0, sizeof(AudioStreamBasicDescription));
	
	MSSndCard *card = ca_card_new("AudioUnit Device", NULL, 0 /*?*/, MS_SND_CARD_CAP_PLAYBACK|MS_SND_CARD_CAP_CAPTURE);
	ms_snd_card_manager_add_card(m, card);
#endif
}
Example #8
static void readCallback(void *aqData,
                         AudioQueueRef inAQ,
                         AudioQueueBufferRef inBuffer,
                         const AudioTimeStamp * inStartTime,
                         UInt32 inNumPackets,
                         const AudioStreamPacketDescription * inPacketDesc)
{
    AQData *d = (AQData *) aqData;
    OSStatus err;
    mblk_t *rm = NULL;

    UInt32 len =
        (inBuffer->mAudioDataByteSize * d->readAudioFormat.mSampleRate /
         1) / d->devicereadFormat.mSampleRate /
        d->devicereadFormat.mChannelsPerFrame;

    ms_mutex_lock(&d->mutex);
    if (d->read_started == FALSE) {
        ms_mutex_unlock(&d->mutex);
        return;
    }

    rm = allocb(len, 0);

#if 0
    err = AudioConverterConvertBuffer(d->readAudioConverter,
                                      inBuffer->mAudioDataByteSize,
                                      inBuffer->mAudioData,
                                      &len, rm->b_wptr);
    if (err != noErr) {
        ms_error("readCallback: AudioConverterConvertBuffer %d", err);
        ms_warning("readCallback: inBuffer->mAudioDataByteSize = %d",
                   inBuffer->mAudioDataByteSize);
        ms_warning("readCallback: outlen = %d", len);
        ms_warning("readCallback: origlen = %i",
                   (inBuffer->mAudioDataByteSize *
                    d->readAudioFormat.mSampleRate / 1) /
                   d->devicereadFormat.mSampleRate /
                   d->devicereadFormat.mChannelsPerFrame);
        freeb(rm);
    } else {

        rm->b_wptr += len;
        if (gain_volume_in != 1.0f)
        {
            int16_t *ptr=(int16_t *)rm->b_rptr;
            for (; ptr<(int16_t *)rm->b_wptr; ptr++)
            {
                *ptr=(int16_t)(((float)(*ptr))*gain_volume_in);
            }
        }
        putq(&d->rq, rm);
    }
#else
    memcpy(rm->b_wptr, inBuffer->mAudioData, len);
    rm->b_wptr += len;
    if (gain_volume_in != 1.0f)
    {
        int16_t *ptr=(int16_t *)rm->b_rptr;
        for (; ptr<(int16_t *)rm->b_wptr; ptr++)
        {
            *ptr=(int16_t)(((float)(*ptr))*gain_volume_in);
        }
    }
    putq(&d->rq, rm);
#endif

    err = AudioQueueEnqueueBuffer(d->readQueue, inBuffer, 0, NULL);
    if (err != noErr) {
        ms_error("readCallback:AudioQueueEnqueueBuffer %ld", err);
    }
    ms_mutex_unlock(&d->mutex);
}
Example #9
static void aq_start_r(MSFilter * f)
{
    AQData *d = (AQData *) f->data;
    if (d->read_started == FALSE) {
        OSStatus aqresult;

        d->readAudioFormat.mSampleRate = d->rate;
        d->readAudioFormat.mFormatID = kAudioFormatLinearPCM;
        d->readAudioFormat.mFormatFlags =
            kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        d->readAudioFormat.mFramesPerPacket = 1;
        d->readAudioFormat.mChannelsPerFrame = 1;
        d->readAudioFormat.mBitsPerChannel = d->bits;
        d->readAudioFormat.mBytesPerPacket = d->bits / 8;
        d->readAudioFormat.mBytesPerFrame = d->bits / 8;

        //show_format("input device", &d->devicereadFormat);
        //show_format("data from input filter", &d->readAudioFormat);

        memcpy(&d->devicereadFormat, &d->readAudioFormat,
               sizeof(d->readAudioFormat));
        d->readBufferByteSize =
            kSecondsPerBuffer * d->devicereadFormat.mSampleRate *
            (d->devicereadFormat.mBitsPerChannel / 8) *
            d->devicereadFormat.mChannelsPerFrame;

#if 0
        aqresult = AudioConverterNew(&d->devicereadFormat,
                                     &d->readAudioFormat,
                                     &d->readAudioConverter);
        if (aqresult != noErr) {
            ms_error("d->readAudioConverter = %d", aqresult);
            d->readAudioConverter = NULL;
        }
#endif

        aqresult = AudioQueueNewInput(&d->devicereadFormat, readCallback, d,	// userData
                                      NULL,	// run loop
                                      NULL,	// run loop mode
                                      0,	// flags
                                      &d->readQueue);
        if (aqresult != noErr) {
            ms_error("AudioQueueNewInput = %ld", aqresult);
        }

        if (d->uidname!=NULL) {
            char uidname[256];
            CFStringGetCString(d->uidname, uidname, 256,
                               CFStringGetSystemEncoding());
            ms_message("AQ: using uidname:%s", uidname);
            aqresult =
                AudioQueueSetProperty(d->readQueue,
                                      kAudioQueueProperty_CurrentDevice,
                                      &d->uidname, sizeof(CFStringRef));
            if (aqresult != noErr) {
                ms_error
                ("AudioQueueSetProperty on kAudioQueueProperty_CurrentDevice %ld",
                 aqresult);
            }
        }

        setupRead(f);
        aqresult = AudioQueueStart(d->readQueue, NULL);	// start time. NULL means ASAP.
        if (aqresult != noErr) {
            ms_error("AudioQueueStart -read- %ld", aqresult);
        }
        d->read_started = TRUE;
    }
}
Example #10
int ms_discover_mtu(const char *host){
	int sock;
	int err,mtu=0,new_mtu;
	socklen_t optlen;
	char port[10];
	struct addrinfo hints,*ai=NULL;
	int family = PF_INET;
	int rand_port;
	int retry=0;
	struct timeval tv;

	/* Try to get the address family of the host (PF_INET or PF_INET6). */
	memset(&hints, 0, sizeof(hints));
	hints.ai_family = PF_UNSPEC;
	hints.ai_flags = AI_NUMERICHOST;
	err = getaddrinfo(host, NULL, &hints, &ai);
	if (err == 0) family = ai->ai_family;

	memset(&hints,0,sizeof(hints));
	hints.ai_family = family;
	hints.ai_socktype = SOCK_DGRAM;
	
	gettimeofday(&tv,NULL);	
	srandom(tv.tv_usec);
	rand_port=random() & 0xFFFF;
	if (rand_port<1000) rand_port+=1000;
	snprintf(port,sizeof(port),"%i",rand_port);
	err=getaddrinfo(host,port,&hints,&ai);
	if (err!=0){
		ms_error("getaddrinfo(): %s\n",gai_strerror(err));
		return -1;
	}
	sock=socket(family,SOCK_DGRAM,0);
	if(sock < 0)
	{
		ms_error("socket(): %s",strerror(errno));
		return sock;
	}
	mtu = (family == PF_INET6) ? IPV6_PMTUDISC_DO: IP_PMTUDISC_DO;
	optlen=sizeof(mtu);
	err=setsockopt(sock,(family == PF_INET6) ? IPPROTO_IPV6 : IPPROTO_IP,(family == PF_INET6) ? IPV6_MTU_DISCOVER : IP_MTU_DISCOVER,&mtu,optlen);
	if (err!=0){
		ms_error("setsockopt(): %s",strerror(errno));
		err = close(sock);
		if (err!=0)
			ms_error("close(): %s", strerror(errno));
		return -1;
	}
	err=connect(sock,ai->ai_addr,ai->ai_addrlen);
	freeaddrinfo(ai);
	if (err!=0){
		ms_error("connect(): %s",strerror(errno));
		err = close(sock);
		if (err !=0)
			ms_error("close(): %s", strerror(errno));
		return -1;
	}
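	/* Start from a typical Ethernet MTU and send DF-flagged datagrams of that size;
	 * once an ICMP "fragmentation needed" comes back, the kernel lowers the value
	 * reported by IP_MTU/IPV6_MTU, so the loop below converges on the path MTU. */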
	mtu=1500;
	do{
		int send_returned;
		int datasize = mtu - (UDP_HEADER_SIZE + ((family == PF_INET6) ? IPV6_HEADER_SIZE : IPV4_HEADER_SIZE));	/*minus IP+UDP overhead*/
		char *buf=ms_malloc0(datasize);

		send_returned = send(sock,buf,datasize,0);
		if (send_returned==-1){
			/*ignore*/
		}
		ms_free(buf);
		usleep(500000);/*wait for an icmp message come back */
		err=getsockopt(sock,(family == PF_INET6) ? IPPROTO_IPV6 : IPPROTO_IP,(family == PF_INET6) ? IPV6_MTU : IP_MTU,&new_mtu,&optlen);
		if (err!=0){
			ms_error("getsockopt(): %s",strerror(errno));
			err = close(sock);
			if (err!=0)
				ms_error("close(): %s", strerror(errno));
			return -1;
		}else{
			ms_message("Partial MTU discovered : %i",new_mtu);
			if (new_mtu==mtu) break;
			else mtu=new_mtu;
		}
		retry++;
	}while(retry<10);
	
	ms_message("mtu to %s is %i",host,mtu);

	err = close(sock);
	if (err!=0)
		ms_error("close() %s", strerror(errno));
	return mtu;
}
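/* A minimal usage sketch for ms_discover_mtu() above; ms_set_mtu() is assumed to
 * be the mediastreamer2 setter used to apply the value, and the host name is
 * illustrative. */
static void example_apply_mtu(void) {
	int mtu = ms_discover_mtu("sip.example.org"); /* illustrative host */
	if (mtu > 0) {
		ms_set_mtu(mtu);
		ms_message("Applying discovered MTU of %i", mtu);
	}
}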
void account_create_on_server(Account *account, const LinphoneProxyConfig *refcfg) {
    LinphoneCoreVTable vtable= {0};
    LinphoneCore *lc;
    LinphoneAddress *tmp_identity=linphone_address_clone(account->modified_identity);
    LinphoneProxyConfig *cfg;
    LinphoneAuthInfo *ai;
    char *tmp;
    LinphoneAddress *server_addr;
    LCSipTransports tr;

    vtable.registration_state_changed=account_created_on_server_cb;
    vtable.auth_info_requested=account_created_auth_requested_cb;
    lc=configure_lc_from(&vtable,bc_tester_get_resource_dir_prefix(),NULL,account);
    tr.udp_port=LC_SIP_TRANSPORT_RANDOM;
    tr.tcp_port=LC_SIP_TRANSPORT_RANDOM;
    tr.tls_port=LC_SIP_TRANSPORT_RANDOM;
    linphone_core_set_sip_transports(lc,&tr);

    cfg=linphone_core_create_proxy_config(lc);
    linphone_address_set_secure(tmp_identity, FALSE);
    linphone_address_set_password(tmp_identity,account->password);
    linphone_address_set_header(tmp_identity,"X-Create-Account","yes");
    tmp=linphone_address_as_string(tmp_identity);
    linphone_proxy_config_set_identity(cfg,tmp);
    ms_free(tmp);
    linphone_address_unref(tmp_identity);

    server_addr=linphone_address_new(linphone_proxy_config_get_server_addr(refcfg));
    linphone_address_set_secure(server_addr, FALSE);
    linphone_address_set_transport(server_addr,LinphoneTransportTcp); /*use tcp for account creation, we may not have certificates configured at this stage*/
    linphone_address_set_port(server_addr,0);
    tmp=linphone_address_as_string(server_addr);
    linphone_proxy_config_set_server_addr(cfg,tmp);
    ms_free(tmp);
    linphone_address_unref(server_addr);
    linphone_proxy_config_set_expires(cfg,3*3600); //accounts are valid 3 hours

    linphone_core_add_proxy_config(lc,cfg);

    if (wait_for_until(lc,NULL,&account->auth_requested,1,10000)==FALSE) {
        ms_fatal("Account for %s could not be created on server.", linphone_proxy_config_get_identity(refcfg));
    }
    linphone_proxy_config_edit(cfg);
    tmp_identity=linphone_address_clone(account->modified_identity);
    linphone_address_set_secure(tmp_identity, FALSE);
    tmp=linphone_address_as_string(tmp_identity);
    linphone_proxy_config_set_identity(cfg,tmp); /*remove the X-Create-Account header*/
    linphone_address_unref(tmp_identity);
    ms_free(tmp);
    linphone_proxy_config_done(cfg);

    ai=linphone_auth_info_new(linphone_address_get_username(account->modified_identity),
                              NULL,
                              account->password,NULL,NULL,linphone_address_get_domain(account->modified_identity));
    linphone_core_add_auth_info(lc,ai);
    linphone_auth_info_destroy(ai);

    if (wait_for_until(lc,NULL,&account->created,1,3000)==FALSE) {
        ms_fatal("Account for %s is not working on server.", linphone_proxy_config_get_identity(refcfg));
    }
    linphone_core_remove_proxy_config(lc,cfg);
    linphone_proxy_config_unref(cfg);
    if (wait_for_until(lc,NULL,&account->done,1,3000)==FALSE) {
        ms_error("Account creation could not clean the registration context.");
    }
    linphone_core_destroy(lc);
}
static void ms_opus_enc_process(MSFilter *f) {
	OpusEncData *d = (OpusEncData *)f->data;
	OpusRepacketizer *repacketizer = NULL;
	mblk_t *om = NULL;
	int packet_size, pcm_buffer_size;
	int max_frame_byte_size, ptime = 20;
	int frame_count = 0, frame_size = 0;
	opus_int32 total_length = 0;
	uint8_t *repacketizer_frame_buffer[MAX_INPUT_FRAMES];
	int i;
	ms_filter_lock(f);
	ptime = d->ptime;
	packet_size = d->samplerate * ptime / 1000; /* in samples */
	ms_filter_unlock(f);
	
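	/* A single Opus frame cannot exceed 60 ms, so larger ptimes are split into
	 * several frames that are merged below with the repacketizer. */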
	switch (ptime) {
		case 10:
			frame_size = d->samplerate * 10 / 1000;
			frame_count = 1;
			break;
		case 20:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 1;
			break;
		case 40:
			frame_size = d->samplerate * 40 / 1000;
			frame_count = 1;
			break;
		case 60:
			frame_size = d->samplerate * 60 / 1000;
			frame_count = 1;
			break;
		case 80:
			frame_size = d->samplerate * 40 / 1000;
			frame_count = 2;
			break;
		case 100:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 5;
			break;
		case 120:
			frame_size = d->samplerate * 60 / 1000;
			frame_count = 2;
			break;
		default:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 1;
	}

	max_frame_byte_size = MAX_BYTES_PER_MS * ptime/frame_count;

	pcm_buffer_size = d->channels * frame_size * SIGNAL_SAMPLE_SIZE;
	if (pcm_buffer_size > d->pcmbufsize){
		if (d->pcmbuffer) ms_free(d->pcmbuffer);
		d->pcmbuffer = ms_malloc(pcm_buffer_size);
		d->pcmbufsize = pcm_buffer_size;
	}
	for (i=0; i<MAX_INPUT_FRAMES; i++) {
		repacketizer_frame_buffer[i]=NULL;
	}

	ms_bufferizer_put_from_queue(d->bufferizer, f->inputs[0]);
	while (ms_bufferizer_get_avail(d->bufferizer) >= (size_t)(d->channels * packet_size * SIGNAL_SAMPLE_SIZE)) {
		opus_int32 ret = 0;

		if (frame_count == 1) { /* One Opus frame, not using the repacketizer */
			om = allocb(max_frame_byte_size, 0);
			ms_bufferizer_read(d->bufferizer, d->pcmbuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
			ret = opus_encode(d->state, (opus_int16 *)d->pcmbuffer, frame_size, om->b_wptr, max_frame_byte_size);
			if (ret < 0) {
				freemsg(om);
				om=NULL;
				ms_error("Opus encoder error: %s", opus_strerror(ret));
				break;
			} else {
				total_length = ret;
				om->b_wptr += total_length;
			}
		} else if(frame_count > 1) { /* We have multiple Opus frames, so we will use the Opus repacketizer */

			repacketizer = opus_repacketizer_create();
			opus_repacketizer_init(repacketizer);

			/* Do not include FEC/LBRR in any frame after the first one since it will be sent with the previous one */
			ret = opus_encoder_ctl(d->state, OPUS_SET_INBAND_FEC(0));
			if (ret != OPUS_OK) {
				ms_error("could not set inband FEC to opus encoder: %s", opus_strerror(ret));
			}
			for (i=0; i<frame_count; i++) {
				if(frame_count == i+1){ /* if configured, reactivate FEC on the last frame to tell the encoder it should restart saving LBRR frames */
					ret = opus_encoder_ctl(d->state, OPUS_SET_INBAND_FEC(d->useinbandfec));
					if (ret != OPUS_OK) {
						ms_error("could not set inband FEC to opus encoder: %s", opus_strerror(ret));
					}
				}
				if (!repacketizer_frame_buffer[i]) repacketizer_frame_buffer[i] = ms_malloc(max_frame_byte_size); /* the repacketizer needs the packet pointers to remain valid, so we keep a buffer for each coded frame */
				ms_bufferizer_read(d->bufferizer, d->pcmbuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
				ret = opus_encode(d->state, (opus_int16 *)d->pcmbuffer, frame_size, repacketizer_frame_buffer[i], max_frame_byte_size);
				if (ret < 0) {
					ms_error("Opus encoder error: %s", opus_strerror(ret));
					break;
				} else if (ret > 0) {
					int err = opus_repacketizer_cat(repacketizer, repacketizer_frame_buffer[i], ret); /* add the encoded frame into the current packet */
					if (err != OPUS_OK) {
						ms_error("Opus repacketizer error: %s", opus_strerror(err));
						break;
					}
					total_length += ret;
				}
			}

			om = allocb(total_length + frame_count + 1, 0); /* opus repacketizer API: allocate at least number of frame + size of all data added before */
			ret = opus_repacketizer_out(repacketizer, om->b_wptr, total_length+frame_count);
			if(ret < 0){
				freemsg(om);
				om=NULL;
				ms_error("Opus repacketizer out error: %s", opus_strerror(ret));
			} else {
				om->b_wptr += ret;
			}
			opus_repacketizer_destroy(repacketizer);
			for (i=0; i<frame_count; i++) {
				if (repacketizer_frame_buffer[i] != NULL) {
					ms_free(repacketizer_frame_buffer[i]);
				}
			}
		}

		if(om) { /* we have an encoded output message */
			mblk_set_timestamp_info(om, d->ts);
			ms_bufferizer_fill_current_metas(d->bufferizer, om);
			ms_queue_put(f->outputs[0], om);
			d->ts += packet_size*48000/d->samplerate; /* RFC payload RTP opus 03 - section 4: RTP timestamp multiplier : WARNING works only with sr at 48000 */
			total_length = 0;
		}
	}

}
static void ms_opus_enc_preprocess(MSFilter *f) {
	int error;
	int opusComplexity = -1;
	const char *env = NULL;

	OpusEncData *d = (OpusEncData *)f->data;
	/* create the encoder */
	d->state = opus_encoder_create(d->samplerate, d->channels, d->application, &error);
	if (error != OPUS_OK) {
		ms_error("Opus encoder creation failed: %s", opus_strerror(error));
		return;
	}

	
#ifndef MS2_WINDOWS_UNIVERSAL
	env = getenv("MS2_OPUS_COMPLEXITY");
#endif
	if (env != NULL) {
		opusComplexity = atoi(env);
		if (opusComplexity < -1)
			opusComplexity = -1; /*our default value*/
		if (opusComplexity > 10)
			opusComplexity = 10;
	}
	if (opusComplexity == -1){
#if defined(__arm__) || defined(_M_ARM)
		int cpucount = ms_factory_get_cpu_count(f->factory);
		if (cpucount == 1){
			opusComplexity = 0; /* set complexity to 0 for single processor arm devices */ 
		}else if (cpucount == 2) {
			opusComplexity = 5; 
		}
#endif
	}
	if (opusComplexity != -1){
		ms_message("Set Opus complexity to %d", opusComplexity);
		opus_encoder_ctl(d->state, OPUS_SET_COMPLEXITY(opusComplexity));
	} /*otherwise we let opus with its default value, which is 9*/
	
	error = opus_encoder_ctl(d->state, OPUS_SET_PACKET_LOSS_PERC(10));
	if (error != OPUS_OK) {
		ms_error("Could not set default loss percentage to opus encoder: %s", opus_strerror(error));
	}

	/* set the encoder parameters: VBR, IN_BAND_FEC, DTX and bitrate settings */
	ms_opus_enc_set_vbr(f);
	ms_opus_enc_set_inbandfec(f);
	ms_opus_enc_set_dtx(f);
	/* if decoder prefers mono signal, force encoder to output mono signal */
	if (d->stereo == 0) {
		error = opus_encoder_ctl(d->state, OPUS_SET_FORCE_CHANNELS(1));
		if (error != OPUS_OK) {
			ms_error("could not force mono channel to opus encoder: %s", opus_strerror(error));
		}
		if (d->channels == 2) ms_message("Opus encoder configured to encode mono despite being fed with stereo.");
	}else if (d->channels == 2){
		ms_message("Opus encoder configured to encode stereo.");
	}

	ms_filter_lock(f);
	// set_bitrate wasn't called, compute it with the default network bitrate (36000)
	if (d->bitrate==-1) {
		compute_max_bitrate(d, 0);
	}
	apply_max_bitrate(d);
	ms_filter_unlock(f);
}
int ms_bitrate_driver_execute_action(MSBitrateDriver *obj, const MSRateControlAction *action){
	if (obj->desc->execute_action)
		return obj->desc->execute_action(obj,action);
	else ms_error("Driver does not implement execute_action");
	return -1;
}
Example #15
void sal_process_incoming_message(SalOp *op,const belle_sip_request_event_t *event){
	belle_sip_request_t* req = belle_sip_request_event_get_request(event);
	belle_sip_server_transaction_t* server_transaction = belle_sip_provider_create_server_transaction(op->base.root->prov,req);
	belle_sip_header_address_t* address;
	belle_sip_header_from_t* from_header;
	belle_sip_header_content_type_t* content_type;
	belle_sip_response_t* resp;
	belle_sip_header_call_id_t* call_id = belle_sip_message_get_header_by_type(req,belle_sip_header_call_id_t);
	belle_sip_header_cseq_t* cseq = belle_sip_message_get_header_by_type(req,belle_sip_header_cseq_t);
	belle_sip_header_date_t *date=belle_sip_message_get_header_by_type(req,belle_sip_header_date_t);
	char* from;
	bool_t plain_text=FALSE;
	bool_t external_body=FALSE;

	from_header=belle_sip_message_get_header_by_type(BELLE_SIP_MESSAGE(req),belle_sip_header_from_t);
	content_type=belle_sip_message_get_header_by_type(BELLE_SIP_MESSAGE(req),belle_sip_header_content_type_t);
	if (content_type && ((plain_text=is_plain_text(content_type))
						|| (external_body=is_external_body(content_type)))) {
		SalMessage salmsg;
		char message_id[256]={0};
	
		if (op->pending_server_trans) belle_sip_object_unref(op->pending_server_trans);
		op->pending_server_trans=server_transaction;
		belle_sip_object_ref(op->pending_server_trans);
	
		address=belle_sip_header_address_create(belle_sip_header_address_get_displayname(BELLE_SIP_HEADER_ADDRESS(from_header))
				,belle_sip_header_address_get_uri(BELLE_SIP_HEADER_ADDRESS(from_header)));
		from=belle_sip_object_to_string(BELLE_SIP_OBJECT(address));
		snprintf(message_id,sizeof(message_id)-1,"%s%i"
				,belle_sip_header_call_id_get_call_id(call_id)
				,belle_sip_header_cseq_get_seq_number(cseq));
		salmsg.from=from;
		salmsg.text=plain_text?belle_sip_message_get_body(BELLE_SIP_MESSAGE(req)):NULL;
		salmsg.url=NULL;
		if (external_body && belle_sip_parameters_get_parameter(BELLE_SIP_PARAMETERS(content_type),"URL")) {
			size_t url_length=strlen(belle_sip_parameters_get_parameter(BELLE_SIP_PARAMETERS(content_type),"URL"));
			salmsg.url = ms_strdup(belle_sip_parameters_get_parameter(BELLE_SIP_PARAMETERS(content_type),"URL")+1); /* skip first "*/
			((char*)salmsg.url)[url_length-2]='\0'; /*remove trailing "*/
		}
		salmsg.message_id=message_id;
		salmsg.time=date ? belle_sip_header_date_get_time(date) : time(NULL);
		op->base.root->callbacks.text_received(op,&salmsg);
		belle_sip_object_unref(address);
		belle_sip_free(from);
		if (salmsg.url) ms_free((char*)salmsg.url);
	} else if (content_type && is_im_iscomposing(content_type)) {
		SalIsComposing saliscomposing;
		address=belle_sip_header_address_create(belle_sip_header_address_get_displayname(BELLE_SIP_HEADER_ADDRESS(from_header))
				,belle_sip_header_address_get_uri(BELLE_SIP_HEADER_ADDRESS(from_header)));
		from=belle_sip_object_to_string(BELLE_SIP_OBJECT(address));
		saliscomposing.from=from;
		saliscomposing.text=belle_sip_message_get_body(BELLE_SIP_MESSAGE(req));
		op->base.root->callbacks.is_composing_received(op,&saliscomposing);
		resp = belle_sip_response_create_from_request(req,200);
		belle_sip_server_transaction_send_response(server_transaction,resp);
		belle_sip_object_unref(address);
		belle_sip_free(from);
	} else {
		ms_error("Unsupported MESSAGE with content type [%s/%s]",belle_sip_header_content_type_get_type(content_type)
				,belle_sip_header_content_type_get_subtype(content_type));
		resp = belle_sip_response_create_from_request(req,415);
		add_message_accept((belle_sip_message_t*)resp);
		belle_sip_server_transaction_send_response(server_transaction,resp);
		return;
	}
}
Example #16
static void aq_start_w(MSFilter * f)
{
    AQData *d = (AQData *) f->data;
    if (d->write_started == FALSE) {
        OSStatus aqresult;
#if TARGET_OS_IPHONE
        aqresult = AudioSessionSetActive(true);
        check_aqresult(aqresult,"AudioSessionSetActive");

        UInt32 audioCategory;

        audioCategory= kAudioSessionCategory_AmbientSound;
        ms_message("Configuring audio session for play back");
        aqresult =AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, sizeof(audioCategory), &audioCategory);
        check_aqresult(aqresult,"Configuring audio session ");
#endif
        d->writeAudioFormat.mSampleRate = d->rate;
        d->writeAudioFormat.mFormatID = kAudioFormatLinearPCM;
        d->writeAudioFormat.mFormatFlags =
            kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        d->writeAudioFormat.mFramesPerPacket = 1;
        d->writeAudioFormat.mChannelsPerFrame = 1;
        d->writeAudioFormat.mBitsPerChannel = d->bits;
        d->writeAudioFormat.mBytesPerPacket = d->bits / 8;
        d->writeAudioFormat.mBytesPerFrame = d->bits / 8;

        //show_format("data provided to output filter",	&d->writeAudioFormat);
        //show_format("output device", &d->devicewriteFormat);

        memcpy(&d->devicewriteFormat, &d->writeAudioFormat,
               sizeof(d->writeAudioFormat));
        d->writeBufferByteSize =
            kSecondsPerBuffer * d->devicewriteFormat.mSampleRate *
            (d->devicewriteFormat.mBitsPerChannel / 8) *
            d->devicewriteFormat.mChannelsPerFrame;

#if 0
        aqresult = AudioConverterNew(&d->writeAudioFormat,
                                     &d->devicewriteFormat,
                                     &d->writeAudioConverter);
        if (aqresult != noErr) {
            ms_error("d->writeAudioConverter = %d", aqresult);
            d->writeAudioConverter = NULL;
        }
#endif

        // create the playback audio queue object
        aqresult = AudioQueueNewOutput(&d->devicewriteFormat, writeCallback, d, NULL,	/*CFRunLoopGetCurrent () */
                                       NULL,	/*kCFRunLoopCommonModes */
                                       0,	// run loop flags
                                       &d->writeQueue);
        if (aqresult != noErr) {
            ms_error("AudioQueueNewOutput = %ld", aqresult);
        }

        AudioQueueSetParameter (d->writeQueue,
                                kAudioQueueParam_Volume,
                                gain_volume_out);

        if (d->uidname!=NULL) {
            char uidname[256];
            CFStringGetCString(d->uidname, uidname, 256,
                               CFStringGetSystemEncoding());
            ms_message("AQ: using uidname:%s", uidname);
            aqresult =
                AudioQueueSetProperty(d->writeQueue,
                                      kAudioQueueProperty_CurrentDevice,
                                      &d->uidname, sizeof(CFStringRef));
            if (aqresult != noErr) {
                ms_error
                ("AudioQueueSetProperty on kAudioQueueProperty_CurrentDevice %ld",
                 aqresult);
            }
        }

        setupWrite(f);
        d->curWriteBuffer = 0;
    }
}
Example #17
/*	inputs[0] = reference signal from far end (sent to soundcard)
 *	inputs[1] = near speech & echo signal (read from soundcard)
 *	outputs[0] = copy of inputs[0], to be sent to soundcard
 *	outputs[1] = near-end speech with echo removed, towards far end
 */
static void webrtc_aec_process(MSFilter *f)
{
	WebRTCAECState *s = (WebRTCAECState *) f->data;
	int nbytes = s->framesize * 2;
	mblk_t *refm;
	uint8_t *ref, *echo;

	if (s->bypass_mode) {
		while ((refm = ms_queue_get(f->inputs[0])) != NULL) {
			ms_queue_put(f->outputs[0], refm);
		}
		while ((refm = ms_queue_get(f->inputs[1])) != NULL) {
			ms_queue_put(f->outputs[1], refm);
		}
		return;
	}

	if (f->inputs[0] != NULL) {
		if (s->echostarted) {
			while ((refm = ms_queue_get(f->inputs[0])) != NULL) {
				refm=ms_audio_flow_controller_process(&s->afc,refm);
				if (refm){
					mblk_t *cp=dupmsg(refm);
					ms_bufferizer_put(&s->delayed_ref,cp);
					ms_bufferizer_put(&s->ref,refm);
				}
			}
		} else {
			ms_warning("Getting reference signal but no echo to synchronize on.");
			ms_queue_flush(f->inputs[0]);
		}
	}

	ms_bufferizer_put_from_queue(&s->echo, f->inputs[1]);

	ref = (uint8_t *) alloca(nbytes);
	echo = (uint8_t *) alloca(nbytes);
	while (ms_bufferizer_read(&s->echo, echo, nbytes) >= nbytes) {
		mblk_t *oecho = allocb(nbytes, 0);
		int avail;
		int avail_samples;

		if (!s->echostarted) s->echostarted = TRUE;
		if ((avail = ms_bufferizer_get_avail(&s->delayed_ref)) < ((s->nominal_ref_samples * 2) + nbytes)) {
			/*we don't have enough to read in a reference signal buffer, inject silence instead*/
			refm = allocb(nbytes, 0);
			memset(refm->b_wptr, 0, nbytes);
			refm->b_wptr += nbytes;
			ms_bufferizer_put(&s->delayed_ref, refm);
			ms_queue_put(f->outputs[0], dupmsg(refm));
			if (!s->using_zeroes) {
				ms_warning("Not enough ref samples, using zeroes");
				s->using_zeroes = TRUE;
			}
		} else {
			if (s->using_zeroes) {
				ms_message("Samples are back.");
				s->using_zeroes = FALSE;
			}
			/* read from our no-delay buffer and output */
			refm = allocb(nbytes, 0);
			if (ms_bufferizer_read(&s->ref, refm->b_wptr, nbytes) == 0) {
				ms_fatal("Should never happen");
			}
			refm->b_wptr += nbytes;
			ms_queue_put(f->outputs[0], refm);
		}

		/*now read a valid buffer of delayed ref samples*/
		if (ms_bufferizer_read(&s->delayed_ref, ref, nbytes) == 0) {
			ms_fatal("Should never happen");
		}
		avail -= nbytes;
		avail_samples = avail / 2;
		if (avail_samples < s->min_ref_samples || s->min_ref_samples == -1) {
			s->min_ref_samples = avail_samples;
		}

#ifdef EC_DUMP
		if (s->reffile)
			fwrite(ref, nbytes, 1, s->reffile);
		if (s->echofile)
			fwrite(echo, nbytes, 1, s->echofile);
#endif
		if (WebRtcAecm_BufferFarend(s->aecmInst, (const WebRtc_Word16 *) ref, s->framesize)!=0)
			ms_error("WebRtcAecm_BufferFarend() failed.");
		if (WebRtcAecm_Process(s->aecmInst, (const WebRtc_Word16 *) echo, NULL, (WebRtc_Word16 *) oecho->b_wptr, s->framesize, 0)!=0)
			ms_error("WebRtcAecm_Process() failed.");
#ifdef EC_DUMP
		if (s->cleanfile)
			fwrite(oecho->b_wptr, nbytes, 1, s->cleanfile);
#endif
		oecho->b_wptr += nbytes;
		ms_queue_put(f->outputs[1], oecho);
	}

	/*verify our ref buffer does not become too big, meaning that we are receiving more samples than we are sending*/
	if ((((uint32_t) (f->ticker->time - s->flow_control_time)) >= flow_control_interval_ms) && (s->min_ref_samples != -1)) {
		int diff = s->min_ref_samples - s->nominal_ref_samples;
		if (diff > (nbytes / 2)) {
			int purge = diff - (nbytes / 2);
			ms_warning("echo canceller: we are accumulating too much reference signal, need to throw out %i samples", purge);
			ms_audio_flow_controller_set_target(&s->afc, purge, (flow_control_interval_ms * s->samplerate) / 1000);
		}
		s->min_ref_samples = -1;
		s->flow_control_time = f->ticker->time;
	}
}
int audio_stream_start_full(AudioStream *stream, RtpProfile *profile, const char *remip,int remport,
	int rem_rtcp_port, int payload,int jitt_comp, const char *infile, const char *outfile,
	MSSndCard *playcard, MSSndCard *captcard, bool_t use_ec)
{
	RtpSession *rtps=stream->session;
	PayloadType *pt;
	int tmp;
	MSConnectionHelper h;

	rtp_session_set_profile(rtps,profile);
	if (remport>0) rtp_session_set_remote_addr_full(rtps,remip,remport,rem_rtcp_port);
	rtp_session_set_payload_type(rtps,payload);
	rtp_session_set_jitter_compensation(rtps,jitt_comp);

	if (remport>0)
		ms_filter_call_method(stream->rtpsend,MS_RTP_SEND_SET_SESSION,rtps);
	stream->rtprecv=ms_filter_new(MS_RTP_RECV_ID);
	ms_filter_call_method(stream->rtprecv,MS_RTP_RECV_SET_SESSION,rtps);
	stream->session=rtps;

	stream->dtmfgen=ms_filter_new(MS_DTMF_GEN_ID);
	rtp_session_signal_connect(rtps,"telephone-event",(RtpCallback)on_dtmf_received,(unsigned long)stream);
	rtp_session_signal_connect(rtps,"payload_type_changed",(RtpCallback)payload_type_changed,(unsigned long)stream);

	/* creates the local part */
	if (captcard!=NULL) stream->soundread=ms_snd_card_create_reader(captcard);
	else {
		stream->soundread=ms_filter_new(MS_FILE_PLAYER_ID);
		stream->read_resampler=ms_filter_new(MS_RESAMPLE_ID);
		if (infile!=NULL) audio_stream_play(stream,infile);
	}
	if (playcard!=NULL) stream->soundwrite=ms_snd_card_create_writer(playcard);
	else {
		stream->soundwrite=ms_filter_new(MS_FILE_REC_ID);
		if (outfile!=NULL) audio_stream_record(stream,outfile);
	}

	/* creates the couple of encoder/decoder */
	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		ms_error("audiostream.c: undefined payload type.");
		return -1;
	}
	stream->encoder=ms_filter_create_encoder(pt->mime_type);
	stream->decoder=ms_filter_create_decoder(pt->mime_type);
	if ((stream->encoder==NULL) || (stream->decoder==NULL)){
		/* big problem: we don't have a registered codec for this payload...*/
		ms_error("audiostream.c: No encoder or decoder available for payload %i.",payload);
		return -1;
	}

	if (stream->el_type!=ELInactive || stream->use_gc || stream->use_ng){
		stream->volsend=ms_filter_new(MS_VOLUME_ID);
		stream->volrecv=ms_filter_new(MS_VOLUME_ID);
		if (stream->el_type!=ELInactive){
			if (stream->el_type==ELControlFull) {
				/* also reduce speaker gain when no signal - same parameters as std. noise gate */
				int tmp=1;
				ms_filter_call_method(stream->volrecv,MS_VOLUME_ENABLE_NOISE_GATE,&tmp);
			}
			ms_filter_call_method(stream->volsend,MS_VOLUME_SET_PEER,stream->volrecv);
		}
		if (stream->use_ng){
			int tmp=1;
			ms_filter_call_method(stream->volsend,MS_VOLUME_ENABLE_NOISE_GATE,&tmp);
		}
	}

	if (stream->use_agc || stream->use_nr){
		int tmp=1;
		if (stream->volsend==NULL)
			stream->volsend=ms_filter_new(MS_VOLUME_ID);

		if(stream->use_agc) ms_filter_call_method(stream->volsend,MS_VOLUME_ENABLE_AGC,&tmp);
		/*Noise Reduction*/
		if(stream->use_nr) ms_filter_call_method(stream->volsend,MS_VOLUME_ENABLE_NR,&tmp);
	}

	/* give the sound filters some properties */
	if (ms_filter_call_method(stream->soundread,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate) != 0) {
		/* need to add resampler*/
		if (stream->read_resampler == NULL) stream->read_resampler=ms_filter_new(MS_RESAMPLE_ID);
	}

	if (ms_filter_call_method(stream->soundwrite,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate) != 0) {
		/* need to add resampler*/
		if (stream->write_resampler == NULL) stream->write_resampler=ms_filter_new(MS_RESAMPLE_ID);
	}

	tmp=1;
	ms_filter_call_method(stream->soundwrite,MS_FILTER_SET_NCHANNELS, &tmp);

	if(stream->record_enabled)
	{
		stream->filewriter = ms_filter_new(MS_FILE_REC_ID);
		stream->recordmixer= ms_filter_new(MS_AUDIO_MIXER_ID);
		stream->mic_tee = ms_filter_new(MS_TEE_ID);
		stream->spk_tee = ms_filter_new(MS_TEE_ID);
		ms_filter_call_method(stream->filewriter,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate);
		ms_filter_call_method(stream->recordmixer,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate);
		tmp=1;
		ms_filter_call_method(stream->recordmixer,MS_FILTER_SET_NCHANNELS,&tmp);
	}

	/*configure the echo canceller if required */
	if (use_ec) {
		stream->ec=ms_filter_new(MS_SPEEX_EC_ID);
		ms_filter_call_method(stream->ec,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate);
		if (stream->ec_tail_len!=0)
			ms_filter_call_method(stream->ec,MS_ECHO_CANCELLER_SET_TAIL_LENGTH,&stream->ec_tail_len);
		if (stream->ec_delay!=0){
			ms_filter_call_method(stream->ec,MS_ECHO_CANCELLER_SET_DELAY,&stream->ec_delay);
		}else{
			/*configure from the latency of the sound card, in case it is available */
			int latency=0;
			ms_filter_call_method(stream->soundread,MS_FILTER_GET_LATENCY,&latency);
			latency-=30; /*keep 30 milliseconds security margin*/
			if (latency<0) latency=0;
			ms_filter_call_method(stream->ec,MS_ECHO_CANCELLER_SET_DELAY,&latency);
		}
		if (stream->ec_framesize!=0)
			ms_filter_call_method(stream->ec,MS_ECHO_CANCELLER_SET_FRAMESIZE,&stream->ec_framesize);
	}

	/* give the encoder/decoder some parameters*/
	ms_filter_call_method(stream->encoder,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate);
	ms_message("Payload's bitrate is %i",pt->normal_bitrate);
	if (pt->normal_bitrate>0){
		ms_message("Setting audio encoder network bitrate to %i",pt->normal_bitrate);
		ms_filter_call_method(stream->encoder,MS_FILTER_SET_BITRATE,&pt->normal_bitrate);
	}
	ms_filter_call_method(stream->decoder,MS_FILTER_SET_SAMPLE_RATE,&pt->clock_rate);

	if (pt->send_fmtp!=NULL) ms_filter_call_method(stream->encoder,MS_FILTER_ADD_FMTP, (void*)pt->send_fmtp);
	if (pt->recv_fmtp!=NULL) ms_filter_call_method(stream->decoder,MS_FILTER_ADD_FMTP,(void*)pt->recv_fmtp);

	/*create the equalizer*/
	stream->equalizer=ms_filter_new(MS_EQUALIZER_ID);
	tmp=stream->eq_active;
	ms_filter_call_method(stream->equalizer,MS_EQUALIZER_SET_ACTIVE,&tmp);
	/*configure resampler if needed*/
	if (stream->read_resampler){
		audio_stream_configure_resampler(stream->read_resampler,stream->soundread,stream->rtpsend);
	}

	if (stream->write_resampler){
		audio_stream_configure_resampler(stream->write_resampler,stream->rtprecv,stream->soundwrite);
	}
	/* and then connect all */
	/* tip: draw yourself the picture if you don't understand */

	/*sending graph*/
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h,stream->soundread,-1,0);
	if (stream->read_resampler)
		ms_connection_helper_link(&h,stream->read_resampler,0,0);
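	/* the echo canceller's pin 1 carries the captured (near-end) signal; pin 0, linked in the receiving graph below, carries the far-end reference */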
	if (stream->ec)
		ms_connection_helper_link(&h,stream->ec,1,1);
	if (stream->volsend)
		ms_connection_helper_link(&h,stream->volsend,0,0);
	if(stream->mic_tee)
		ms_connection_helper_link(&h,stream->mic_tee,0,0);
	if(stream->mic_tee && stream->recordmixer)
		ms_filter_link(stream->mic_tee,1,stream->recordmixer,0);

	ms_connection_helper_link(&h,stream->encoder,0,0);
	ms_connection_helper_link(&h,stream->rtpsend,0,-1);

	/*receiving graph*/
	ms_connection_helper_start(&h);
	ms_connection_helper_link(&h,stream->rtprecv,-1,0);
	ms_connection_helper_link(&h,stream->decoder,0,0);
	ms_connection_helper_link(&h,stream->dtmfgen,0,0);
	if (stream->equalizer)
		ms_connection_helper_link(&h,stream->equalizer,0,0);
	if (stream->volrecv)
		ms_connection_helper_link(&h,stream->volrecv,0,0);
	if (stream->ec)
		ms_connection_helper_link(&h,stream->ec,0,0);
	if (stream->write_resampler)
		ms_connection_helper_link(&h,stream->write_resampler,0,0);
	if(stream->spk_tee)
		ms_connection_helper_link(&h,stream->spk_tee,0,0);
	if (stream->spk_tee && stream->recordmixer)
		ms_filter_link(stream->spk_tee,1,stream->recordmixer,1);
	ms_connection_helper_link(&h,stream->soundwrite,0,-1);

	if (stream->filewriter && stream->spk_tee && stream->mic_tee && stream->recordmixer){
		ms_filter_link(stream->recordmixer,0,stream->filewriter,0);
	}
	/* create ticker */
	stream->ticker=ms_ticker_new();
	ms_ticker_set_name(stream->ticker,"Audio MSTicker");
	ms_ticker_attach(stream->ticker,stream->soundread);
	ms_ticker_attach(stream->ticker,stream->rtprecv);

	return 0;
}
Exemple #19
int ms_discover_mtu(const char *host)
{
	int i;
	struct addrinfo hints,*ai=NULL;
	char port[10];
	char ipaddr[INET6_ADDRSTRLEN];
	int err;
	HANDLE hIcmp;
	unsigned long target_addr;
	struct ip_option_information ip_opts;
	unsigned char reply_buffer[10000];

	if (!m_IcmpInst)
	{
		m_IcmpInst = LoadLibrary("icmp.dll");
		if (m_IcmpInst)
		{
			pIcmpCloseHandle = (ICMPCLOSEHANDLE)GetProcAddress(m_IcmpInst, "IcmpCloseHandle");
			pIcmpCreateFile  = (ICMPCREATEFILE) GetProcAddress(m_IcmpInst, "IcmpCreateFile");
			pIcmpSendEcho    = (ICMPSENDECHO)   GetProcAddress(m_IcmpInst, "IcmpSendEcho");
		}
	}

	hIcmp = pIcmpCreateFile();

	memset(&hints,0,sizeof(hints));
	hints.ai_family = PF_INET;
	hints.ai_socktype = SOCK_DGRAM;

	snprintf(port,sizeof(port),"0");
	err=getaddrinfo(host,port,&hints,&ai);
	if (err!=0){
		pIcmpCloseHandle( hIcmp );
		ms_error("getaddrinfo(): %s", gai_strerror(err));
		return -1;
	}
	getnameinfo (ai->ai_addr, ai->ai_addrlen, ipaddr, sizeof (ipaddr), port,
	             sizeof (port), NI_NUMERICHOST | NI_NUMERICSERV);
	freeaddrinfo(ai);

	target_addr=inet_addr(ipaddr);

	/* Prepare the IP options: set the Don't Fragment flag so oversized probes cannot be fragmented along the path */
	memset(&ip_opts,0,sizeof(ip_opts));
	ip_opts.Ttl=30;
	ip_opts.Flags = IP_FLAG_DF | IP_OPT_ROUTER_ALERT;

	/* probe each candidate size in the mtus table; the icmpbuff data contents are ignored */
	for (i=0;mtus[i]!=0;i++)
	{
		char icmpbuff[2048];
		char *icmp_data = icmpbuff;

		int status = -1;
		if (pIcmpSendEcho)
			status=pIcmpSendEcho(hIcmp,
			                     target_addr,
			                     (LPVOID)icmp_data,
			                     mtus[i]-60, /* icmp_data_size: presumably leaves headroom for IP/ICMP headers */
			                     &ip_opts,
			                     reply_buffer,
			                     sizeof(reply_buffer),
			                     3000L); /* 3 second timeout */
		/* a reply, or a plain timeout (i.e. no "packet too big" error), means this size made it through */
		if (status || GetLastError() == IP_REQ_TIMED_OUT)
		{
			pIcmpCloseHandle( hIcmp );
			return mtus[i];
		}
	}

	pIcmpCloseHandle( hIcmp );

	return -1;
}
Exemple #20
static void ms_opus_enc_process(MSFilter *f) {
	OpusEncData *d = (OpusEncData *)f->data;
	mblk_t *im;
	mblk_t *om = NULL;
	int i;
	int frameNumber, packet_size;
	uint8_t *signalFrameBuffer = NULL;
	uint8_t *codedFrameBuffer[MAX_INPUT_FRAMES];
	OpusRepacketizer *rp = opus_repacketizer_create();
	opus_int32 ret = 0;
	opus_int32 totalLength = 0;
	int frame_size = d->samplerate * FRAME_LENGTH / 1000; /* in samples */

	// lock the access while getting ptime
	ms_filter_lock(f);
	frameNumber = d->ptime/FRAME_LENGTH; /* encode 20ms frames, ptime is a multiple of 20ms */
	packet_size = d->samplerate * d->ptime / 1000; /* in samples */
	ms_filter_unlock(f);
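	/* for example, with ptime=60 ms at 48000 Hz (and FRAME_LENGTH presumably 20 ms): frameNumber=3 frames per packet, packet_size=2880 samples */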


	while ((im = ms_queue_get(f->inputs[0])) != NULL) {
		ms_bufferizer_put(d->bufferizer, im);
	}

	for (i=0; i<MAX_INPUT_FRAMES; i++) {
		codedFrameBuffer[i]=NULL;
	}
	while (ms_bufferizer_get_avail(d->bufferizer) >= (d->channels * packet_size * SIGNAL_SAMPLE_SIZE)) {
		totalLength = 0;
		opus_repacketizer_init(rp);
		for (i=0; i<frameNumber; i++) { /* encode 20ms by 20ms and repacketize all of them together */
			if (!codedFrameBuffer[i]) codedFrameBuffer[i] = ms_malloc(MAX_BYTES_PER_FRAME); /* the repacketizer needs the pointers to the packets to remain valid, so we keep a separate buffer for each coded frame */
			if (!signalFrameBuffer) signalFrameBuffer = ms_malloc(frame_size * SIGNAL_SAMPLE_SIZE * d->channels);

			ms_bufferizer_read(d->bufferizer, signalFrameBuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
			ret = opus_encode(d->state, (opus_int16 *)signalFrameBuffer, frame_size, codedFrameBuffer[i], MAX_BYTES_PER_FRAME);
			if (ret < 0) {
				ms_error("Opus encoder error: %s", opus_strerror(ret));
				break;
			}
			if (ret > 0) {
				int err = opus_repacketizer_cat(rp, codedFrameBuffer[i], ret); /* add the encoded frame into the current packet */
				if (err != OPUS_OK) {
					ms_error("Opus repacketizer error: %s", opus_strerror(err));
					break;
				}
				totalLength += ret;
			}
		}

		if (ret > 0) {
			om = allocb(totalLength+frameNumber + 1, 0); /* opus repacketizer API: allocate at least the number of frames plus the size of all the data added before */
			ret = opus_repacketizer_out(rp, om->b_wptr, totalLength+frameNumber);

			om->b_wptr += ret;
			mblk_set_timestamp_info(om, d->ts);
			ms_bufferizer_fill_current_metas(d->bufferizer, om);
			ms_queue_put(f->outputs[0], om);
			d->ts += packet_size*48000/d->samplerate; /* RFC payload RTP opus 03 - section 4: RTP timestamp multiplier : WARNING works only with sr at 48000 */
			ret = 0;
		}
	}

	opus_repacketizer_destroy(rp);

	if (signalFrameBuffer != NULL) {
		ms_free(signalFrameBuffer);
	}
	for (i=0; i<frameNumber; i++) {
		if (codedFrameBuffer[i] != NULL) {
			ms_free(codedFrameBuffer[i]);
		}
	}
}
Exemple #21
OSStatus readRenderProc(void *inRefCon, 
						AudioUnitRenderActionFlags *inActionFlags,
						const AudioTimeStamp *inTimeStamp, 
						UInt32 inBusNumber,
						UInt32 inNumFrames, 
						AudioBufferList *ioData)
{
	CAData *d=(CAData*)inRefCon;
	OSStatus	err = noErr;
	
	err = AudioUnitRender(d->caInAudioUnit, inActionFlags, inTimeStamp, inBusNumber,
						  inNumFrames, d->fAudioBuffer);
	if(err != noErr)
	{
		ms_error("AudioUnitRender %d size = %d", err, d->fAudioBuffer->mBuffers[0].mDataByteSize);
		return err;
	}
	
	UInt32 AvailableOutputBytes = inNumFrames * sizeof (float) * d->caInASBD.mChannelsPerFrame;
	UInt32 propertySize = sizeof (AvailableOutputBytes);
	err = AudioConverterGetProperty (d->caInConverter,
									 kAudioConverterPropertyCalculateOutputBufferSize,
									 &propertySize,
									 &AvailableOutputBytes);
	
	if(err != noErr)
	{
		ms_error("AudioConverterGetProperty kAudioConverterPropertyCalculateOutputBufferSize %d", err);
		return err;
	}
	
	if (AvailableOutputBytes>d->fMSBuffer->mBuffers[0].mDataByteSize)
	{	
		DestroyAudioBufferList(d->fMSBuffer);
		d->fMSBuffer = AllocateAudioBufferList(d->stereo ? 2 : 1,
											   AvailableOutputBytes);
	}
	
	UInt32 ActualOutputFrames = AvailableOutputBytes / ((d->bits / 8) * 1) / d->caInASBD.mChannelsPerFrame;
	err = AudioConverterFillComplexBuffer (d->caInConverter,
										   (AudioConverterComplexInputDataProc)(readACInputProc),
										   inRefCon,
										   &ActualOutputFrames,
										   d->fMSBuffer,
										   NULL);
	if(err != noErr)
	{
		ms_error("readRenderProc:AudioConverterFillComplexBuffer %d", err);
		return err;
	}
	
	mblk_t *rm=NULL;
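	/* the converted audio is copied out as 16-bit samples; the *2 byte count per frame assumes mono output from the converter */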
	rm=allocb(ActualOutputFrames*2,0);
	memcpy(rm->b_wptr, d->fMSBuffer->mBuffers[0].mData, ActualOutputFrames*2);
	rm->b_wptr+=ActualOutputFrames*2;
	
	if (gain_volume_in != 1.0f)
	{
		int16_t *ptr=(int16_t *)rm->b_rptr;
		for (;ptr<(int16_t *)rm->b_wptr;ptr++)
		{
			*ptr=(int16_t)(((float)(*ptr))*gain_volume_in);
		}
	}
	
	ms_mutex_lock(&d->mutex);
	putq(&d->rq,rm);
	ms_mutex_unlock(&d->mutex);
	rm=NULL;
	
	return err;
}
static void configure_video_source(VideoStream *stream){
	MSVideoSize vsize,cam_vsize;
	float fps=15;
	MSPixFmt format;

	/* transmit orientation to source filter */
	ms_filter_call_method(stream->source,MS_VIDEO_CAPTURE_SET_DEVICE_ORIENTATION,&stream->device_orientation);
	
	/* transmit its preview window id if any to source filter */
	if (stream->preview_window_id!=0){
		video_stream_set_native_preview_window_id(stream, stream->preview_window_id);
	}

	ms_filter_call_method(stream->encoder,MS_FILTER_GET_VIDEO_SIZE,&vsize);
	vsize=get_compatible_size(vsize,stream->sent_vsize);
	ms_filter_call_method(stream->source,MS_FILTER_SET_VIDEO_SIZE,&vsize);
	/*the camera may not support the target size and may suggest one close to the target */
	ms_filter_call_method(stream->source,MS_FILTER_GET_VIDEO_SIZE,&cam_vsize);
	if (cam_vsize.width*cam_vsize.height<=vsize.width*vsize.height &&
			cam_vsize.width != vsize.width){
		vsize=cam_vsize;
		ms_message("Output video size adjusted to match camera resolution (%ix%i)\n",vsize.width,vsize.height);
	} else if (cam_vsize.width*cam_vsize.height>vsize.width*vsize.height){
#if TARGET_IPHONE_SIMULATOR || defined(__arm__)
		ms_error("Camera is proposing a size bigger than encoder's suggested size (%ix%i > %ix%i) "
		           "Using the camera size as fallback because cropping or resizing is not implemented for arm.",
		           cam_vsize.width,cam_vsize.height,vsize.width,vsize.height);
		vsize=cam_vsize;
#else
		vsize=get_with_same_orientation(vsize,cam_vsize);
		ms_warning("Camera video size greater than encoder one. A scaling filter will be used!\n");
#endif
	}
	ms_filter_call_method(stream->encoder,MS_FILTER_SET_VIDEO_SIZE,&vsize);
	ms_filter_call_method(stream->encoder,MS_FILTER_GET_FPS,&fps);
	ms_message("Setting sent vsize=%ix%i, fps=%f",vsize.width,vsize.height,fps);
	/* configure the filters */
	if (ms_filter_get_id(stream->source)!=MS_STATIC_IMAGE_ID) {
		ms_filter_call_method(stream->source,MS_FILTER_SET_FPS,&fps);
	}
	/* get the output format for webcam reader */
	ms_filter_call_method(stream->source,MS_FILTER_GET_PIX_FMT,&format);

	if (format==MS_MJPEG){
		stream->pixconv=ms_filter_new(MS_MJPEG_DEC_ID);
	}else{
		stream->pixconv = ms_filter_new(MS_PIX_CONV_ID);
		/*set it to the pixconv */
		ms_filter_call_method(stream->pixconv,MS_FILTER_SET_PIX_FMT,&format);
		ms_filter_call_method(stream->pixconv,MS_FILTER_SET_VIDEO_SIZE,&cam_vsize);
	}
	stream->sizeconv=ms_filter_new(MS_SIZE_CONV_ID);
	ms_filter_call_method(stream->sizeconv,MS_FILTER_SET_VIDEO_SIZE,&vsize);
	if (stream->rc){
		ms_bitrate_controller_destroy(stream->rc);
		stream->rc=NULL;
	}
	if (stream->use_rc){
		stream->rc=ms_av_bitrate_controller_new(NULL,NULL,stream->session,stream->encoder);
	}
}
Exemple #23
static int ca_open_w(CAData *d){
	OSStatus result;
	UInt32 param;
	AudioDeviceID fInputDeviceID;
	
	ComponentDescription desc;  
	Component comp;
	
	// Get Default Input audio unit
	desc.componentType = kAudioUnitType_Output;
	desc.componentSubType = kAudioUnitSubType_HALOutput;
	desc.componentManufacturer = kAudioUnitManufacturer_Apple;
	desc.componentFlags = 0;
	desc.componentFlagsMask = 0;
	
	comp = FindNextComponent(NULL, &desc);
	if (comp == NULL)
	{
		ms_message("Cannot find audio component");
		return -1;
	}
	
	result = OpenAComponent(comp, &d->caOutAudioUnit);
	if(result != noErr)
	{
		ms_message("Cannot open audio component %x", result);
		return -1;
	}
	
	param = 1;
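	/* enable IO on the output element (bus 0); the next call passes 0 to disable the unused input element (bus 1) */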
	result = AudioUnitSetProperty(d->caOutAudioUnit,
								  kAudioOutputUnitProperty_EnableIO,
								  kAudioUnitScope_Output,
								  0,
								  &param,
								  sizeof(UInt32));
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	param = 0;
	result = AudioUnitSetProperty(d->caOutAudioUnit,
								  kAudioOutputUnitProperty_EnableIO,
								  kAudioUnitScope_Input,
								  1,
								  &param,
								  sizeof(UInt32));
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	// Set the selected device on the output audio unit.
	result = AudioUnitSetProperty(d->caOutAudioUnit,
								  kAudioOutputUnitProperty_CurrentDevice,
								  kAudioUnitScope_Global,
								  0,
								  &d->dev,
								  sizeof(AudioDeviceID));
	if (result == kAudioUnitErr_InvalidPropertyValue)
		ms_message("AudioUnitSetProperty kAudioUnitErr_InvalidPropertyValue");
	else
		ms_message("AudioUnitSetProperty %i %x", result, result);
	
	UInt32 asbdsize = sizeof(AudioStreamBasicDescription);
	memset((char *)&d->caOutASBD, 0, asbdsize);
	
	// Setup Output audio unit
	result = AudioUnitGetProperty (d->caOutAudioUnit,
								   kAudioUnitProperty_StreamFormat,
								   kAudioUnitScope_Output,
								   0,
								   &d->caOutASBD,
								   &asbdsize);
	ms_message("AudioUnitGetProperty %i %x", result, result);
	result = AudioUnitSetProperty (d->caOutAudioUnit,
								   kAudioUnitProperty_StreamFormat,
								   kAudioUnitScope_Input,
								   0,
								   &d->caOutASBD,
								   asbdsize);
	ms_message("AudioUnitSetProperty %i %x", result, result);
	
	d->caSourceBuffer=NULL;
	
	result = AudioUnitInitialize(d->caOutAudioUnit);
	if(result != noErr)
	{
		ms_error("failed to AudioUnitInitialize output %i", result);
		return -1;
	}
	return 0;
}
int video_stream_start (VideoStream *stream, RtpProfile *profile, const char *rem_rtp_ip, int rem_rtp_port,
	const char *rem_rtcp_ip, int rem_rtcp_port, int payload, int jitt_comp, MSWebCam *cam){
	PayloadType *pt;
	RtpSession *rtps=stream->session;
	MSPixFmt format;
	MSVideoSize disp_size;
	int tmp;
	JBParameters jbp;
	const int socket_buf_size=2000000;

	if (cam==NULL){
		cam=ms_web_cam_manager_get_default_cam (
		      ms_web_cam_manager_get());
	}

	pt=rtp_profile_get_payload(profile,payload);
	if (pt==NULL){
		ms_error("videostream.c: undefined payload type.");
		return -1;
	}

	rtp_session_set_profile(rtps,profile);
	if (rem_rtp_port>0) rtp_session_set_remote_addr_full(rtps,rem_rtp_ip,rem_rtp_port,rem_rtcp_ip,rem_rtcp_port);
	rtp_session_set_payload_type(rtps,payload);
	rtp_session_set_jitter_compensation(rtps,jitt_comp);

	rtp_session_signal_connect(stream->session,"payload_type_changed",
			(RtpCallback)payload_type_changed,(unsigned long)stream);

	rtp_session_get_jitter_buffer_params(stream->session,&jbp);
	jbp.max_packets=1000;//needed for high resolution video
	rtp_session_set_jitter_buffer_params(stream->session,&jbp);
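	/* large socket buffers help absorb bursts of high-bitrate video without the OS dropping packets */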
	rtp_session_set_rtp_socket_recv_buffer_size(stream->session,socket_buf_size);
	rtp_session_set_rtp_socket_send_buffer_size(stream->session,socket_buf_size);

	if (stream->dir==VideoStreamSendRecv || stream->dir==VideoStreamSendOnly){
		/*plumb the outgoing stream */

		if (rem_rtp_port>0) ms_filter_call_method(stream->rtpsend,MS_RTP_SEND_SET_SESSION,stream->session);
		stream->encoder=ms_filter_create_encoder(pt->mime_type);
		if ((stream->encoder==NULL) ){
			/* big problem: we don't have a registered codec for this payload...*/
			ms_error("videostream.c: No encoder available for payload %i:%s.",payload,pt->mime_type);
			return -1;
		}
		/* creates the filters */
		stream->cam=cam;
		stream->source = ms_web_cam_create_reader(cam);
		stream->tee = ms_filter_new(MS_TEE_ID);

		if (pt->normal_bitrate>0){
			ms_message("Limiting bitrate of video encoder to %i bits/s",pt->normal_bitrate);
			ms_filter_call_method(stream->encoder,MS_FILTER_SET_BITRATE,&pt->normal_bitrate);
		}
		if (pt->send_fmtp){
			ms_filter_call_method(stream->encoder,MS_FILTER_ADD_FMTP,pt->send_fmtp);
		}
		if (stream->use_preview_window){
			if (stream->rendercb==NULL){
				stream->output2=ms_filter_new_from_name (stream->display_name);
			}
		}

		configure_video_source (stream);
			/* and then connect all */
		ms_filter_link (stream->source, 0, stream->pixconv, 0);
		ms_filter_link (stream->pixconv, 0, stream->sizeconv, 0);
		ms_filter_link (stream->sizeconv, 0, stream->tee, 0);
		ms_filter_link (stream->tee, 0 ,stream->encoder, 0 );
		ms_filter_link (stream->encoder,0, stream->rtpsend,0);
		if (stream->output2){
			if (stream->preview_window_id!=0){
				ms_filter_call_method(stream->output2, MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID,&stream->preview_window_id);
			}
			ms_filter_link(stream->tee,1,stream->output2,0);
		}
	}
	if (stream->dir==VideoStreamSendRecv || stream->dir==VideoStreamRecvOnly){
		MSConnectionHelper ch;
		/*plumb the incoming stream */
		stream->decoder=ms_filter_create_decoder(pt->mime_type);
		if ((stream->decoder==NULL) ){
			/* big problem: we don't have a registered decoder for this payload...*/
			ms_error("videostream.c: No decoder available for payload %i:%s.",payload,pt->mime_type);
			return -1;
		}
		ms_filter_set_notify_callback(stream->decoder, event_cb, stream);

		stream->rtprecv = ms_filter_new (MS_RTP_RECV_ID);
		ms_filter_call_method(stream->rtprecv,MS_RTP_RECV_SET_SESSION,stream->session);


		stream->jpegwriter=ms_filter_new(MS_JPEG_WRITER_ID);
		if (stream->jpegwriter)
			stream->tee2=ms_filter_new(MS_TEE_ID);

		if (stream->rendercb!=NULL){
			stream->output=ms_filter_new(MS_EXT_DISPLAY_ID);
			ms_filter_set_notify_callback(stream->output,ext_display_cb,stream);
		}else{
			stream->output=ms_filter_new_from_name (stream->display_name);
		}

		/* Don't allow null output */
		if(stream->output == NULL) {
			ms_fatal("No video display filter could be instantiated. Please check build-time configuration");
		}

		/* set parameters to the decoder*/
		if (pt->send_fmtp){
			ms_filter_call_method(stream->decoder,MS_FILTER_ADD_FMTP,pt->send_fmtp);
		}
		if (pt->recv_fmtp!=NULL)
			ms_filter_call_method(stream->decoder,MS_FILTER_ADD_FMTP,(void*)pt->recv_fmtp);

		/*force the decoder to output YUV420P */
		format=MS_YUV420P;
		ms_filter_call_method(stream->decoder,MS_FILTER_SET_PIX_FMT,&format);

		/*configure the display window */
		if(stream->output != NULL) {
			disp_size.width=MS_VIDEO_SIZE_CIF_W;
			disp_size.height=MS_VIDEO_SIZE_CIF_H;
			tmp=1;
			ms_filter_call_method(stream->output,MS_FILTER_SET_VIDEO_SIZE,&disp_size);
			ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_ENABLE_AUTOFIT,&tmp);
			ms_filter_call_method(stream->output,MS_FILTER_SET_PIX_FMT,&format);
			ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_SET_LOCAL_VIEW_MODE,&stream->corner);
			if (stream->window_id!=0){
				ms_filter_call_method(stream->output, MS_VIDEO_DISPLAY_SET_NATIVE_WINDOW_ID,&stream->window_id);
			}
			if (stream->display_filter_auto_rotate_enabled) {
				ms_filter_call_method(stream->output,MS_VIDEO_DISPLAY_SET_DEVICE_ORIENTATION,&stream->device_orientation);
			}
		}
		/* and connect the filters */
		ms_connection_helper_start (&ch);
		ms_connection_helper_link (&ch,stream->rtprecv,-1,0);
		ms_connection_helper_link (&ch,stream->decoder,0,0);
		if (stream->tee2){
			ms_connection_helper_link (&ch,stream->tee2,0,0);
			ms_filter_link(stream->tee2,1,stream->jpegwriter,0);
		}
		if (stream->output!=NULL)
			ms_connection_helper_link (&ch,stream->output,0,-1);
		/* the video source must also be sent to the preview display, if it exists */
		if (stream->tee!=NULL && stream->output!=NULL && stream->output2==NULL)
			ms_filter_link(stream->tee,1,stream->output,1);
	}

	/* create the ticker */
	if (stream->ticker==NULL) start_ticker(stream);

	/* attach the graphs */
	if (stream->source)
		ms_ticker_attach (stream->ticker, stream->source);
	if (stream->rtprecv)
		ms_ticker_attach (stream->ticker, stream->rtprecv);
	return 0;
}
Exemple #25
static void ecc_play_tones(EcCalibrator *ecc){
	MSDtmfGenCustomTone tone;
	MSToneDetectorDef expected_tone;
	
	memset(&tone,0,sizeof(tone));
	memset(&expected_tone,0,sizeof(expected_tone));

	ms_filter_add_notify_callback(ecc->det,on_tone_received,ecc,TRUE);

	/* configure the tones to be scanned */
	
	strncpy(expected_tone.tone_name,"freq1",sizeof(expected_tone.tone_name));
	expected_tone.frequency=2000;
	expected_tone.min_duration=40;
	expected_tone.min_amplitude=0.1;

	ms_filter_call_method (ecc->det,MS_TONE_DETECTOR_ADD_SCAN,&expected_tone);
	
	strncpy(expected_tone.tone_name,"freq2",sizeof(expected_tone.tone_name));
	expected_tone.frequency=2300;
	expected_tone.min_duration=40;
	expected_tone.min_amplitude=0.1;

	ms_filter_call_method (ecc->det,MS_TONE_DETECTOR_ADD_SCAN,&expected_tone);
	
	strncpy(expected_tone.tone_name,"freq3",sizeof(expected_tone.tone_name));
	expected_tone.frequency=2500;
	expected_tone.min_duration=40;
	expected_tone.min_amplitude=0.1;

	ms_filter_call_method (ecc->det,MS_TONE_DETECTOR_ADD_SCAN,&expected_tone);
	
	/*play an initial tone to start up the audio playback/capture*/
	
	tone.frequencies[0]=140;
	tone.duration=1000;
	tone.amplitude=0.5;

	ms_filter_call_method(ecc->gen,MS_DTMF_GEN_PLAY_CUSTOM,&tone);
	ms_sleep(2);

	ms_filter_add_notify_callback(ecc->gen,on_tone_sent,ecc,TRUE);
	
	/* play the three tones*/
	
	tone.frequencies[0]=2000;
	tone.duration=100;
	ms_filter_call_method(ecc->gen,MS_DTMF_GEN_PLAY_CUSTOM,&tone);
	ms_usleep(300000);
	
	tone.frequencies[0]=2300;
	tone.duration=100;
	ms_filter_call_method(ecc->gen,MS_DTMF_GEN_PLAY_CUSTOM,&tone);
	ms_usleep(300000);
	
	tone.frequencies[0]=2500;
	tone.duration=100;
	ms_filter_call_method(ecc->gen,MS_DTMF_GEN_PLAY_CUSTOM,&tone);
	ms_sleep(1);
	
	if (ecc->freq1 && ecc->freq2 && ecc->freq3) {
		int delay=ecc->acc/3;
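		/* ecc->acc presumably accumulates the delay measured for each of the three tones, so dividing by 3 averages them */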
		if (delay<0){
			ms_error("Quite surprising calibration result, delay=%i",delay);
			ecc->status=LinphoneEcCalibratorFailed;
		}else{
			ms_message("Echo calibration estimated delay to be %i ms",delay);
			ecc->delay=delay;
			ecc->status=LinphoneEcCalibratorDone;
		}
	} else if ((ecc->freq1 || ecc->freq2 || ecc->freq3)==FALSE) {
			ms_message("Echo calibration succeeded, no echo has been detected");
			ecc->status = LinphoneEcCalibratorDoneNoEcho;
	} else {
			ecc->status = LinphoneEcCalibratorFailed;
	}

	if (ecc->status == LinphoneEcCalibratorFailed) {
		ms_error("Echo calibration failed.");
	}
}
Exemple #26
static int v4w_configure_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;
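	/* enumerate the video capture devices and keep the moniker whose FriendlyName matches s->dev (or simply the first device when s->dev is empty) */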

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
			continue; 

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		/* no device matched the requested name: fall back to the first device that can be opened */
		pEnumMoniker->Reset();
		while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
		{
			IPropertyBag *pBag=NULL;
			hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
			if( hr != S_OK )
			{
				pMoniker->Release();
				pMoniker=NULL;
				continue;
			}
			pBag->Release();
			break;
		}
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt; /* the requested format is supported: keep it */
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	return 0;
}
Exemple #27
int ms_factory_load_plugins(MSFactory *factory, const char *dir){
	int num=0;
#if defined(_WIN32) && !defined(_WIN32_WCE)
	WIN32_FIND_DATA FileData;
	HANDLE hSearch;
	char szDirPath[1024];
#ifdef UNICODE
	wchar_t wszDirPath[1024];
#endif
	char szPluginFile[1024];
	BOOL fFinished = FALSE;
	const char *tmp = NULL;
	BOOL debug = FALSE;
#ifndef MS2_WINDOWS_UNIVERSAL
	tmp = getenv("DEBUG");
#endif
	debug = (tmp != NULL && atoi(tmp) == 1);

	snprintf(szDirPath, sizeof(szDirPath), "%s", dir);

	// Start searching for .dll files in the current directory.
#ifdef MS2_WINDOWS_DESKTOP
	snprintf(szDirPath, sizeof(szDirPath), "%s\\*.dll", dir);
#else
	snprintf(szDirPath, sizeof(szDirPath), "%s\\libms*.dll", dir);
#endif
#ifdef UNICODE
	mbstowcs(wszDirPath, szDirPath, sizeof(wszDirPath));
	hSearch = FindFirstFileExW(wszDirPath, FindExInfoStandard, &FileData, FindExSearchNameMatch, NULL, 0);
#else
	hSearch = FindFirstFileExA(szDirPath, FindExInfoStandard, &FileData, FindExSearchNameMatch, NULL, 0);
#endif
	if (hSearch == INVALID_HANDLE_VALUE)
	{
		ms_message("no plugin (*.dll) found in [%s] [%d].", szDirPath, (int)GetLastError());
		return 0;
	}
	snprintf(szDirPath, sizeof(szDirPath), "%s", dir);

	while (!fFinished)
	{
		/* load library */
#ifdef MS2_WINDOWS_DESKTOP
		UINT em=0;
#endif
		HINSTANCE os_handle;
#ifdef UNICODE
		wchar_t wszPluginFile[2048];
		char filename[512];
		wcstombs(filename, FileData.cFileName, sizeof(filename));
		snprintf(szPluginFile, sizeof(szPluginFile), "%s\\%s", szDirPath, filename);
		mbstowcs(wszPluginFile, szPluginFile, sizeof(wszPluginFile));
#else
		snprintf(szPluginFile, sizeof(szPluginFile), "%s\\%s", szDirPath, FileData.cFileName);
#endif
#ifdef MS2_WINDOWS_DESKTOP
		if (!debug) em = SetErrorMode (SEM_FAILCRITICALERRORS);

#ifdef UNICODE
		os_handle = LoadLibraryExW(wszPluginFile, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
#else
		os_handle = LoadLibraryExA(szPluginFile, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
#endif
		if (os_handle==NULL)
		{
			ms_message("Fail to load plugin %s with altered search path: error %i",szPluginFile,(int)GetLastError());
#ifdef UNICODE
			os_handle = LoadLibraryExW(wszPluginFile, NULL, 0);
#else
			os_handle = LoadLibraryExA(szPluginFile, NULL, 0);
#endif
		}
		if (!debug) SetErrorMode (em);
#else
		os_handle = LoadPackagedLibrary(wszPluginFile, 0);
#endif
		if (os_handle==NULL)
			ms_error("Fail to load plugin %s: error %i", szPluginFile, (int)GetLastError());
		else{
			init_func_t initroutine;
			char szPluginName[256];
			char szMethodName[256];
			char *minus;
#ifdef UNICODE
			snprintf(szPluginName, sizeof(szPluginName), "%s", filename);
#else
			snprintf(szPluginName, sizeof(szPluginName), "%s", FileData.cFileName);
#endif
			/*on mingw, dll names might be libsomething-3.dll. We must skip the -X.dll stuff*/
			minus=strchr(szPluginName,'-');
			if (minus) *minus='\0';
			else szPluginName[strlen(szPluginName)-4]='\0'; /*remove .dll*/
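			/* each plugin is expected to export an init entry point named <library base name>_init */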
			snprintf(szMethodName, sizeof(szMethodName), "%s_init", szPluginName);
			initroutine = (init_func_t) GetProcAddress (os_handle, szMethodName);
			if (initroutine!=NULL){
				initroutine(factory);
				ms_message("Plugin loaded (%s)", szPluginFile);
				// Add this new loaded plugin to the list (useful for FreeLibrary at the end)
				factory->ms_plugins_loaded_list=ms_list_append(factory->ms_plugins_loaded_list,os_handle);
				num++;
			}else{
				ms_warning("Could not locate init routine of plugin %s. Should be %s",
					szPluginFile, szMethodName);
			}
		}
		if (!FindNextFile(hSearch, &FileData)) {
			if (GetLastError() == ERROR_NO_MORE_FILES){
				fFinished = TRUE;
			}
			else
			{
				ms_error("couldn't find next plugin dll.");
				fFinished = TRUE;
			}
		}
	}
	/* Close the search handle. */
	FindClose(hSearch);

#elif defined(HAVE_DLOPEN)
	char plugin_name[64];
	DIR *ds;
	MSList *loaded_plugins = NULL;
	struct dirent *de;
	char *ext;
	char *fullpath;
	ds=opendir(dir);
	if (ds==NULL){
		ms_message("Cannot open directory %s: %s",dir,strerror(errno));
		return -1;
	}
	while( (de=readdir(ds))!=NULL){
		if (
#ifndef __QNX__
			(de->d_type==DT_REG || de->d_type==DT_UNKNOWN || de->d_type==DT_LNK) &&
#endif
			(ext=strstr(de->d_name,PLUGINS_EXT))!=NULL) {
			void *handle;
			snprintf(plugin_name, MIN(sizeof(plugin_name), ext - de->d_name + 1), "%s", de->d_name);
			if (ms_list_find_custom(loaded_plugins, (MSCompareFunc)strcmp, plugin_name) != NULL) continue;
			loaded_plugins = ms_list_append(loaded_plugins, ms_strdup(plugin_name));
			fullpath=ms_strdup_printf("%s/%s",dir,de->d_name);
			ms_message("Loading plugin %s...",fullpath);

			if ( (handle=dlopen(fullpath,RTLD_NOW))==NULL){
				ms_warning("Fail to load plugin %s : %s",fullpath,dlerror());
			}else {
				char *initroutine_name=ms_malloc0(strlen(de->d_name)+10);
				char *p;
				void *initroutine=NULL;
				strcpy(initroutine_name,de->d_name);
				p=strstr(initroutine_name,PLUGINS_EXT);
				if (p!=NULL){
					strcpy(p,"_init");
					initroutine=dlsym(handle,initroutine_name);
				}

#ifdef __APPLE__
				if (initroutine==NULL){
					/* on macosx: library names are libxxxx.1.2.3.dylib */
					/* -> MUST remove the .1.2.3 */
					p=strstr(initroutine_name,".");
					if (p!=NULL)
					{
						strcpy(p,"_init");
						initroutine=dlsym(handle,initroutine_name);
					}
				}
#endif

				if (initroutine!=NULL){
					init_func_t func=(init_func_t)initroutine;
					func(factory);
					ms_message("Plugin loaded (%s)", fullpath);
					num++;
				}else{
					ms_warning("Could not locate init routine of plugin %s",de->d_name);
				}
				ms_free(initroutine_name);
			}
			ms_free(fullpath);
		}
	}
	ms_list_for_each(loaded_plugins, ms_free);
	ms_list_free(loaded_plugins);
	closedir(ds);
#else
	ms_warning("no loadable plugin support: plugins cannot be loaded.");
	num=-1;
#endif
	return num;
}
Exemple #28
static int v4w_open_videodevice(V4wState *s)
{
	// Initialize COM
	CoInitialize(NULL);

	// get a Graph
	HRESULT hr= CoCreateInstance (CLSID_FilterGraph,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, //IID_IBaseFilter,
		(void **)&s->m_pGraph);
	if(FAILED(hr))
	{
		return -1;
	}

	// get a CaptureGraphBuilder2
	hr= CoCreateInstance (CLSID_CaptureGraphBuilder2,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, //IID_IBaseFilter,
		(void **)&s->m_pBuilder);
	if(FAILED(hr))
	{
		return -2;
	}

	// connect capture graph builder with the graph
	s->m_pBuilder->SetFiltergraph(s->m_pGraph);

	// get mediacontrol so we can start and stop the filter graph
	hr=s->m_pGraph->QueryInterface (IID_IMediaControl, (void **)&s->m_pControl);
	if(FAILED(hr))
	{
		return -3;
	}


	ICreateDevEnum *pCreateDevEnum = NULL;
	IEnumMoniker *pEnumMoniker = NULL;
	IMoniker *pMoniker = NULL;

	ULONG nFetched = 0;

	hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, 
		IID_ICreateDevEnum, (PVOID *)&pCreateDevEnum);
	if(FAILED(hr))
	{
		return -4;
	}

	hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
		&pEnumMoniker, 0);
	if (FAILED(hr) || pEnumMoniker == NULL) {
		//printf("no device\n");
		return -5;
	}

	pEnumMoniker->Reset();

	int pos=0;
	while(S_OK == pEnumMoniker->Next(1, &pMoniker, &nFetched) )
	{
		IPropertyBag *pBag;
		hr = pMoniker->BindToStorage( 0, 0, IID_IPropertyBag, (void**) &pBag );
		if( hr != S_OK )
			continue; 

		if (s->dev[0]=='\0')
			break;

		VARIANT var;
		VariantInit(&var);
		hr = pBag->Read( L"FriendlyName", &var, NULL ); 
		if( hr != S_OK )
		{
			pMoniker->Release();
			continue;
		}
		//USES_CONVERSION;
		char szName[256];

		WideCharToMultiByte(CP_UTF8,0,var.bstrVal,-1,szName,256,0,0);
		VariantClear(&var); 

		if (strcmp(szName, s->dev)==0)
			break;

		pMoniker->Release();
		pBag->Release();
		pMoniker=NULL;
		pBag=NULL;
	}

	if(pMoniker==NULL)
	{
		return -6;
	}

	hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&s->m_pDeviceFilter );
	if(FAILED(hr))
	{
		return -7;
	}

	s->m_pGraph->AddFilter(s->m_pDeviceFilter, L"Device Filter");

	pMoniker->Release();
	pEnumMoniker->Release();
	pCreateDevEnum->Release();


	GUID pPinCategory;

	if (try_format(s->m_pDeviceFilter, s->pix_fmt, &pPinCategory)==0)
		s->pix_fmt = s->pix_fmt; /* the requested format is supported: keep it */
	else if (try_format(s->m_pDeviceFilter,MS_YUV420P, &pPinCategory)==0)
		s->pix_fmt = MS_YUV420P;
	else if (try_format(s->m_pDeviceFilter,MS_YUY2, &pPinCategory)==0)
		s->pix_fmt = MS_YUY2;
	else if (try_format(s->m_pDeviceFilter,MS_YUYV, &pPinCategory)==0)
		s->pix_fmt = MS_YUYV;
	else if (try_format(s->m_pDeviceFilter,MS_UYVY, &pPinCategory)==0)
		s->pix_fmt = MS_UYVY;
	else if (try_format(s->m_pDeviceFilter,MS_RGB24, &pPinCategory)==0)
		s->pix_fmt = MS_RGB24;
	else
	{
		ms_error("Unsupported video pixel format.");
		return -8;
	}

	if (s->pix_fmt == MS_YUV420P)
		ms_message("Driver supports YUV420P, using that format.");
	else if (s->pix_fmt == MS_YUY2)
		ms_message("Driver supports YUY2 (YUYV), using that format.");
	else if (s->pix_fmt == MS_YUYV)
		ms_message("Driver supports YUV422, using that format.");
	else if (s->pix_fmt == MS_UYVY)
		ms_message("Driver supports UYVY, using that format.");
	else if (s->pix_fmt == MS_RGB24)
		ms_message("Driver supports RGB24, using that format.");

	if (try_format_size(s, s->pix_fmt, s->vsize.width, s->vsize.height, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", s->vsize.width, s->vsize.height);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QCIF_W, MS_VIDEO_SIZE_QCIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_CIF_W, MS_VIDEO_SIZE_CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4CIF_W, MS_VIDEO_SIZE_4CIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QVGA_W, MS_VIDEO_SIZE_QVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QQVGA_W, MS_VIDEO_SIZE_QQVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_NS1_W, MS_VIDEO_SIZE_NS1_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_QSIF_W, MS_VIDEO_SIZE_QSIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SIF_W, MS_VIDEO_SIZE_SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_VGA_W, MS_VIDEO_SIZE_VGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SDTV_W, MS_VIDEO_SIZE_SDTV_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_288P_W, MS_VIDEO_SIZE_288P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_448P_W, MS_VIDEO_SIZE_448P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_576P_W, MS_VIDEO_SIZE_576P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_720P_W, MS_VIDEO_SIZE_720P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_1080P_W, MS_VIDEO_SIZE_1080P_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_4SIF_W, MS_VIDEO_SIZE_4SIF_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_SVGA_W, MS_VIDEO_SIZE_SVGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_XGA_W, MS_VIDEO_SIZE_XGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_WXGA_W, MS_VIDEO_SIZE_WXGA_H);
	else if (try_format_size(s, s->pix_fmt, MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H, &pPinCategory)==0)
		ms_message("Selected Size: %ix%i.", MS_VIDEO_SIZE_HDTVP_W, MS_VIDEO_SIZE_HDTVP_H);
	else
	{
		ms_error("No supported size found for format.");
		/* size not supported? */
		return -9;
	}

	// get DXFilter
	s->m_pDXFilter = new CDXFilter(NULL, &hr, FALSE);
	if(s->m_pDXFilter==NULL)
	{
		return -10;
	}
	s->m_pDXFilter->AddRef();

	CMediaType mt;
	mt.SetType(&MEDIATYPE_Video);

	GUID m = MEDIASUBTYPE_RGB24;
	if (s->pix_fmt == MS_YUV420P)
		m = (GUID)FOURCCMap(MAKEFOURCC('I','4','2','0'));
	else if (s->pix_fmt == MS_YUY2)
		m = MEDIASUBTYPE_YUY2;
	else if (s->pix_fmt == MS_YUYV)
		m = MEDIASUBTYPE_YUYV;
	else if (s->pix_fmt == MS_UYVY)
		m = MEDIASUBTYPE_UYVY;
	else if (s->pix_fmt == MS_RGB24)
		m = MEDIASUBTYPE_RGB24;
	mt.SetSubtype(&m);

	mt.formattype = FORMAT_VideoInfo;
	mt.SetTemporalCompression(FALSE);

	VIDEOINFO *pvi = (VIDEOINFO *)
		mt.AllocFormatBuffer(sizeof(VIDEOINFO));
	if (NULL == pvi)
		return -11;
	ZeroMemory(pvi, sizeof(VIDEOINFO));

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','V');
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biCompression = BI_RGB;

	if (s->pix_fmt == MS_YUV420P)
		pvi->bmiHeader.biBitCount = 12;
	else if (s->pix_fmt == MS_YUY2)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_YUYV)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_UYVY)
		pvi->bmiHeader.biBitCount = 16;
	else if (s->pix_fmt == MS_RGB24)
		pvi->bmiHeader.biBitCount = 24;

	pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth = s->vsize.width;
	pvi->bmiHeader.biHeight = s->vsize.height;
	pvi->bmiHeader.biPlanes = 1;
	pvi->bmiHeader.biSizeImage = GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant = 0;
	mt.SetSampleSize(pvi->bmiHeader.biSizeImage);

	mt.SetFormat((BYTE*)pvi, sizeof(VIDEOINFO));

	hr = s->m_pDXFilter->SetAcceptedMediaType(&mt);
	if(FAILED(hr))
	{
		return -12;
	}

	hr = s->m_pDXFilter->SetCallback(Callback); 
	if(FAILED(hr))
	{
		return -13;
	}

	hr = s->m_pDXFilter->QueryInterface(IID_IBaseFilter,
		(LPVOID *)&s->m_pIDXFilter);
	if(FAILED(hr))
	{
		return -14;
	}

	hr = s->m_pGraph->AddFilter(s->m_pIDXFilter, L"DXFilter Filter");
	if(FAILED(hr))
	{
		return -15;
	}


	// get null renderer
	hr=CoCreateInstance (CLSID_NullRenderer,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IBaseFilter,
		(void **)&s->m_pNullRenderer);
	if(FAILED(hr))
	{
		return -16;
	}
	if (s->m_pNullRenderer!=NULL)
	{
		s->m_pGraph->AddFilter(s->m_pNullRenderer, L"Null Renderer");
	}
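
	/* build the capture graph: device filter -> DXFilter -> null renderer; the null renderer terminates the graph so frames are only delivered through the DXFilter callback */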

	hr = s->m_pBuilder->RenderStream(&pPinCategory,
		&MEDIATYPE_Video, s->m_pDeviceFilter, s->m_pIDXFilter, s->m_pNullRenderer);
	if (FAILED(hr))
	{
		return -17;
	}

	IAMStreamConfig *pConfig = NULL;
	hr = s->m_pBuilder->FindInterface(
		&pPinCategory, // Pin category selected above by try_format().
		&MEDIATYPE_Video,    // Video media type.
		s->m_pDeviceFilter, // Pointer to the capture filter.
		IID_IAMStreamConfig, (void**)&pConfig); 
	if (pConfig!=NULL)
	{
		AM_MEDIA_TYPE *pType = NULL;
		int iCount, iSize;
		pConfig->GetNumberOfCapabilities(&iCount, &iSize);
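		/* walk the device capabilities and, for entries matching the negotiated subtype, size and format, request the desired frame rate */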

		for (int i = 0; i < iCount; i++) {
			VIDEO_STREAM_CONFIG_CAPS scc;
			pType = NULL;
			pConfig->GetStreamCaps(i, &pType, (BYTE *)&scc);

			if (!((pType->formattype == FORMAT_VideoInfo) &&
				(pType->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
				(pType->pbFormat != NULL)))
				continue;

			VIDEOINFOHEADER & videoInfo = *(VIDEOINFOHEADER *)pType->pbFormat;

			if (m != pType->subtype)
				continue;

			if (videoInfo.bmiHeader.biWidth != s->vsize.width)
				continue;

			if (videoInfo.bmiHeader.biHeight != s->vsize.height)
				continue;

			if (videoInfo.bmiHeader.biBitCount != pvi->bmiHeader.biBitCount)
				continue;

			if (videoInfo.bmiHeader.biCompression != pvi->bmiHeader.biCompression)
				continue;

			videoInfo.AvgTimePerFrame = UNITS / (LONGLONG)s->fps;
			pConfig->SetFormat(pType);    
		}

		pConfig->GetFormat(&pType);
		if (pType!=NULL)
		{
			VIDEOINFO *pvi;
			pvi = (VIDEOINFO *)pType->pbFormat;
			ms_message("v4w: camera asked fps=%.2f // real fps=%.2f", s->fps, ((float)UNITS / (float)pvi->AvgTimePerFrame));
		}

		pConfig->Release();
	}

	//m_pDXFilter->SetBufferSamples(TRUE);

	s_callback = s;
	hr = s->m_pControl->Run();
	if(FAILED(hr))
	{
		return -18;
	}


	s->rotregvalue=1;
	return 0;
}
static void file_transfer_message_download_cancelled(void) {
#if 0
	int i;
	char* to;
	LinphoneChatRoom* chat_room;
	LinphoneChatMessage* message;
	LinphoneContent content;
	const char* big_file_content="big file"; /* setting dummy file content to something */
	LinphoneCoreManager* marie = linphone_core_manager_new( "marie_rc");
	LinphoneCoreManager* pauline = linphone_core_manager_new( "pauline_rc");
	reset_counters(&marie->stat);
	reset_counters(&pauline->stat);

	/* setting dummy file content to something */
	for (i=0;i<sizeof(big_file);i+=strlen(big_file_content))
		memcpy(big_file+i, big_file_content, strlen(big_file_content));

	big_file[0]=*"S";
	big_file[sizeof(big_file)-1]=*"E";

	/* Globally configure an http file transfer server. */
	linphone_core_set_file_transfer_server(pauline->lc,"https://www.linphone.org:444/lft.php");

	/* create a chatroom on pauline's side */
	to = linphone_address_as_string(marie->identity);
	chat_room = linphone_core_create_chat_room(pauline->lc,to);

	/* create a file transfer message */
	memset(&content,0,sizeof(content));
	content.type="text";
	content.subtype="plain";
	content.size=sizeof(big_file); /*total size to be transferred*/
	content.name = "bigfile.txt";
	message = linphone_chat_room_create_file_transfer_message(chat_room, &content);
	{
		int dummy=0;
		wait_for_until(marie->lc,pauline->lc,&dummy,1,100); /*just to have time to purge message stored in the server*/
		reset_counters(&marie->stat);
		reset_counters(&pauline->stat);
	}
	linphone_chat_room_send_message2(chat_room,message,liblinphone_tester_chat_message_state_change,pauline->lc);

	/* wait for marie to receive pauline's message */
	CU_ASSERT_TRUE(wait_for(pauline->lc,marie->lc,&marie->stat.number_of_LinphoneMessageReceivedWithFile,1));


	if (marie->stat.last_received_chat_message ) { /* get last message and use it to download file */
		linphone_chat_message_start_file_download(marie->stat.last_received_chat_message, liblinphone_tester_chat_message_state_change, marie->lc);
		/* wait for file to be 50% downloaded */
		CU_ASSERT_TRUE(wait_for(pauline->lc,marie->lc,&marie->stat.progress_of_LinphoneFileTransfer, 50));
		/* and cancel the transfer */
		linphone_chat_message_cancel_file_transfer(marie->stat.last_received_chat_message);
	}

	CU_ASSERT_EQUAL(pauline->stat.number_of_LinphoneMessageInProgress,1);
	CU_ASSERT_EQUAL(pauline->stat.number_of_LinphoneMessageDelivered,1);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphoneMessageExtBodyReceived,0);
	CU_ASSERT_EQUAL(marie->stat.number_of_LinphoneMessageNotDelivered,1);

	linphone_core_manager_destroy(marie);
	linphone_core_manager_destroy(pauline);
#endif
	ms_error("Test skipped");
}
Exemple #30
/**
 * Constructs a LinphoneAddress object by parsing the user supplied address,
 * given as a string.
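 * @return a new LinphoneAddress, or NULL if the address could not be parsed (an error is logged).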
**/
LinphoneAddress * linphone_address_new(const char *addr){
	SalAddress *saddr=sal_address_new(addr);
	if (saddr==NULL)
		ms_error("Cannot create LinphoneAddress, bad uri [%s]",addr);
	return saddr;
}