/* Video (RGB) frame callback, invoked on libfreenect's capture thread.
 * Rotates the triple buffer under the backbuffer mutex: the previous
 * "mid" buffer is handed back to libfreenect as the next back buffer,
 * and the frame just delivered becomes the new "mid" buffer. */
void rgb_callback(freenect_device *dev, void *pixels, uint32_t timestamp)
{
	t_jit_freenect_grab *grab = freenect_get_user(dev);

	if (!grab) {
		error("Invalid max object supplied in rgb_callback\n");// TODO:should print only in debug mode
		return;
	}

	if (!grab->is_open)
		return;

	systhread_mutex_lock(grab->backbuffer_mutex);

	/* Swap: old mid buffer goes back to libfreenect, fresh frame becomes mid. */
	grab->rgb_back = grab->rgb_mid;
	freenect_set_video_buffer(dev, grab->rgb_back);
	grab->rgb_mid = (uint8_t *)pixels;
	grab->got_rgb++;

	systhread_mutex_unlock(grab->backbuffer_mutex);
}
/* Depth frame callback, invoked on libfreenect's capture thread.
 * Mirrors rgb_callback: rotates the depth triple buffer under the
 * backbuffer mutex and flags that a new depth frame is available. */
void depth_callback(freenect_device *dev, void *pixels, uint32_t timestamp)
{
	t_jit_freenect_grab *grab = freenect_get_user(dev);

	if (!grab) {
		error("Invalid max object supplied in depth_callback\n");// TODO:should print only in debug mode
		return;
	}

	if (!grab->is_open)
		return;

	systhread_mutex_lock(grab->backbuffer_mutex);

	/* Swap: old mid buffer goes back to libfreenect, fresh frame becomes mid. */
	grab->depth_back = grab->depth_mid;
	freenect_set_depth_buffer(dev, grab->depth_back);
	grab->depth_mid = (uint16_t *)pixels;
	grab->got_depth++;

	systhread_mutex_unlock(grab->backbuffer_mutex);
}
Exemple #3
0
/* Audio callback: appends the four 32-bit microphone streams into
 * per-channel ring buffers of max_samples entries, wrapping at the end,
 * then signals the consumer waiting on audiobuf_cond. */
void in_callback(freenect_device* dev, int num_samples,
                 int32_t* mic1, int32_t* mic2,
                 int32_t* mic3, int32_t* mic4,
                 int16_t* cancelled, void *unknown) {
	pthread_mutex_lock(&audiobuf_mutex);
	capture* cap = (capture*)freenect_get_user(dev);
	int32_t* mics[4] = { mic1, mic2, mic3, mic4 };
	int room = cap->max_samples - cap->current_idx; /* space before wrap */
	int ch;
	if (num_samples < room) {
		/* Fits without wrapping: one copy per channel. */
		for (ch = 0; ch < 4; ch++)
			memcpy(&cap->buffers[ch][cap->current_idx], mics[ch],
			       num_samples * sizeof(int32_t));
	} else {
		/* Wraps: copy the tail up to the end, then the rest at index 0. */
		int tail = room;
		int head = num_samples - tail;
		for (ch = 0; ch < 4; ch++) {
			memcpy(&cap->buffers[ch][cap->current_idx], mics[ch],
			       tail * sizeof(int32_t));
			memcpy(cap->buffers[ch], &mics[ch][tail],
			       head * sizeof(int32_t));
		}
	}
	cap->current_idx = (cap->current_idx + num_samples) % cap->max_samples;
	cap->new_data = 1;
	pthread_cond_signal(&audiobuf_cond);
	pthread_mutex_unlock(&audiobuf_mutex);
}
Exemple #4
0
/* Passes a raw 16-bit depth frame through unmodified and delivers it to
 * the GPAC terminal as a single SL packet, stamped with the device time. */
void Freenect_DepthCallback_GREY16(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    FreenectIn *vcap = freenect_get_user(dev);
    if (!vcap->depth_channel)
        return;

    memcpy(vcap->depth_buf, v_depth, vcap->out_depth_size);
    vcap->depth_sl_header.compositionTimeStamp = timestamp;
    gf_term_on_sl_packet(vcap->service, vcap->depth_channel,
                         (char *) vcap->depth_buf, vcap->out_depth_size,
                         &vcap->depth_sl_header, GF_OK);
}
Exemple #5
0
// Video callback: dispatches the incoming frame to the grabber's IR or
// RGB handler depending on its current mode. Both paths treat the frame
// as a fixed-size FREENECT_FRAME_W x FREENECT_FRAME_H byte buffer.
static void kinect_video_db(freenect_device *dev, void *rgb, uint32_t timestamp)
{
    FreenectGrabber *grabber = reinterpret_cast<FreenectGrabber*>(freenect_get_user(dev));
    uint8_t *frame = reinterpret_cast<uint8_t*>(rgb);
    if (grabber->irModeEnabled())
        grabber->irCallBack(frame, FREENECT_FRAME_W, FREENECT_FRAME_H);
    else
        grabber->rgbCallBack(frame, FREENECT_FRAME_W, FREENECT_FRAME_H);
}
Exemple #6
0
/* Audio callback: appends each microphone's raw 32-bit samples to its
 * per-channel log file and keeps a running total of samples recorded.
 * Fix: the return values of the four fwrite() calls were silently
 * ignored, so a full disk or I/O error went unnoticed; short writes are
 * now reported on stderr (the log file itself stays best-effort). */
void in_callback(freenect_device* dev, int num_samples,
                 int32_t* mic1, int32_t* mic2,
                 int32_t* mic3, int32_t* mic4,
                 int16_t* cancelled, void *unknown) {
	capture* c = (capture*)freenect_get_user(dev);
	int32_t* mics[4] = { mic1, mic2, mic3, mic4 };
	size_t nbytes = (size_t)num_samples * sizeof(int32_t);
	int i;
	for (i = 0; i < 4; i++) {
		if (fwrite(mics[i], 1, nbytes, c->logfiles[i]) != nbytes)
			fprintf(stderr, "Short write on mic %d log file\n", i + 1);
	}
	c->samples += num_samples;
	printf("Sample received.  Total samples recorded: %d\n", c->samples);
}
Exemple #7
0
/* JNI depth callback: copies the native depth buffer into a Java byte
 * array and invokes the registered Java depth-handler method, all under
 * the file-global mutex.
 * NOTE(review): the v_depth parameter is unused — the copy reads
 * d->f_depth_buffer instead; presumably that buffer was registered with
 * libfreenect elsewhere so the two alias — verify against setup code.
 * NOTE(review): uses the file-global `thread_env` JNIEnv; a JNIEnv is
 * only valid on the thread it was attached to, so this assumes the
 * callback runs on that same capture thread — confirm. */
void depth_cb(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    pthread_mutex_lock(&mutex);

    jfreenect_device_t *d = (jfreenect_device_t *) freenect_get_user(dev);
    
    /* Copy depthbuffersize bytes of native depth data into the Java array. */
    (*thread_env)->SetByteArrayRegion(thread_env, d->depthbuffervalue, 0, d->depthbuffersize, d->f_depth_buffer);
    
    /* Invoke d->object.<depthmethod>(depthbuffervalue) on the Java side. */
    (*thread_env)->CallVoidMethod
        (thread_env, d->object, d->depthmethod, d->depthbuffervalue);
    
    pthread_mutex_unlock(&mutex);
}
Exemple #8
0
/* Depth callback that maps each 11-bit depth value through the gamma
 * lookup table and renders it as an RGB color gradient (white/red for
 * near, through yellow/green/cyan, to blue for far; black when out of
 * range), then ships the RGB frame to the GPAC terminal as an SL packet.
 * Fix: the function was missing its closing brace (truncated source). */
void Freenect_DepthCallback_ColorGradient(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    FreenectIn *vcap = freenect_get_user(dev);
    if (vcap->depth_channel) {
        u32 i;
        u16 *depth = (u16*)v_depth;
        /*remap to color RGB using freenect gamma*/
        for (i=0; i<vcap->width*vcap->height; i++) {
            int pval = vcap->gamma[depth[i]];
            int lb = pval & 0xff;       /* low byte: position within the band */
            switch (pval>>8) {          /* high byte: which color band */
            case 0:
                vcap->depth_buf[3*i+0] = 255;
                vcap->depth_buf[3*i+1] = 255-lb;
                vcap->depth_buf[3*i+2] = 255-lb;
                break;
            case 1:
                vcap->depth_buf[3*i+0] = 255;
                vcap->depth_buf[3*i+1] = lb;
                vcap->depth_buf[3*i+2] = 0;
                break;
            case 2:
                vcap->depth_buf[3*i+0] = 255-lb;
                vcap->depth_buf[3*i+1] = 255;
                vcap->depth_buf[3*i+2] = 0;
                break;
            case 3:
                vcap->depth_buf[3*i+0] = 0;
                vcap->depth_buf[3*i+1] = 255;
                vcap->depth_buf[3*i+2] = lb;
                break;
            case 4:
                vcap->depth_buf[3*i+0] = 0;
                vcap->depth_buf[3*i+1] = 255-lb;
                vcap->depth_buf[3*i+2] = 255;
                break;
            case 5:
                vcap->depth_buf[3*i+0] = 0;
                vcap->depth_buf[3*i+1] = 0;
                vcap->depth_buf[3*i+2] = 255-lb;
                break;
            default:
                /* Out of gradient range: render black. */
                vcap->depth_buf[3*i+0] = 0;
                vcap->depth_buf[3*i+1] = 0;
                vcap->depth_buf[3*i+2] = 0;
                break;
            }
        }
        vcap->depth_sl_header.compositionTimeStamp = timestamp;
        gf_term_on_sl_packet(vcap->service, vcap->depth_channel, (char *) vcap->depth_buf, vcap->out_depth_size, &vcap->depth_sl_header, GF_OK);
    }
}
Exemple #9
0
/* Depth callback producing an 8-bit greyscale frame: each 11-bit depth
 * sample is scaled linearly from [0, 2048) to [0, 255] and sent to the
 * GPAC terminal as an SL packet.
 * Note: the device timestamp parameter is deliberately not used here;
 * the composition timestamp is advanced by one per frame instead. */
void Freenect_DepthCallback_GREY8(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    FreenectIn *vcap = freenect_get_user(dev);
    if (!vcap->depth_channel)
        return;

    u16 *depth = (u16*)v_depth;
    u32 npix = vcap->width * vcap->height;
    u32 px;
    for (px = 0; px < npix; px++) {
        int pval = depth[px];
        vcap->depth_buf[px] = (255 * pval) / 2048;
    }
    vcap->depth_sl_header.compositionTimeStamp++;
    gf_term_on_sl_packet(vcap->service, vcap->depth_channel,
                         (char *) vcap->depth_buf, vcap->out_depth_size,
                         &vcap->depth_sl_header, GF_OK);
}
Exemple #10
0
// Depth callback: rotates the triple buffer (the just-filled frame
// becomes "mid", the old mid goes back to libfreenect as the next back
// buffer) and publishes the newest frame to the shared "front" buffer
// under the mutex for the consumer thread.
void kinect_depth_callback(freenect_device* dev, void* depth, uint32_t timestamp) {

  Kinect* self = static_cast<Kinect*>(freenect_get_user(dev));
  if(self == NULL) {
    printf("Error: cannot get the Kinect* user ptr.\n");
    ::exit(EXIT_FAILURE);
  }

  // Sanity check: libfreenect should have filled the buffer we registered.
  if(depth != self->depth_back) {
    printf("Error: wrong depth pointer!\n");
  }

  self->depth_back = self->depth_mid;
  freenect_set_depth_buffer(dev, self->depth_back);
  self->depth_mid = (uint8_t*)depth;

  uv_mutex_lock(&self->mutex);
  memcpy(self->depth_front, self->depth_mid, self->nbytes_rgb);
  self->has_new_depth = true;
  uv_mutex_unlock(&self->mutex);
}
Exemple #11
0
/* Depth callback producing an RGBD frame: the RGB channels are taken
 * from the latest video frame (vid_buf), and the alpha channel encodes
 * depth scaled so near pixels are opaque (255) and far ones transparent.
 * The resulting RGBA buffer is sent to the GPAC terminal as an SL packet. */
void Freenect_DepthCallback_RGBD(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    FreenectIn *vcap = freenect_get_user(dev);
    u16 *depth;
    u32 px, npix;

    if (!vcap->depth_channel)
        return;

    depth = (u16*)v_depth;
    npix = vcap->width * vcap->height;
    for (px = 0; px < npix; px++) {
        int src = 3 * px;                        /* RGB triplet in vid_buf */
        int dst = 4 * px;                        /* RGBA quad in depth_buf */
        int alpha = 255 - (255 * depth[px]) / 2048;  /* near => opaque */

        vcap->depth_buf[dst]     = vcap->vid_buf[src];
        vcap->depth_buf[dst + 1] = vcap->vid_buf[src + 1];
        vcap->depth_buf[dst + 2] = vcap->vid_buf[src + 2];
        vcap->depth_buf[dst + 3] = alpha;
    }
    vcap->depth_sl_header.compositionTimeStamp = timestamp;
    gf_term_on_sl_packet(vcap->service, vcap->depth_channel,
                         (char *) vcap->depth_buf, vcap->out_depth_size,
                         &vcap->depth_sl_header, GF_OK);
}
Exemple #12
0
		// Static trampoline: recovers the C++ wrapper stored via
		// freenect_set_user() and forwards the RGB frame to it.
		static void freenect_rgb_callback(freenect_device *dev, freenect_pixel *rgb, uint32_t timestamp) {
			static_cast<FreenectDevice*>(freenect_get_user(dev))->RGBCallback(rgb, timestamp);
		}
/* Depth flavour of the producer callback: looks up this device's sync
 * state and delegates to the shared inner implementation, passing the
 * depth stream's buffer-swap function. */
static void depth_producer_cb(freenect_device *dev, void *data, uint32_t timestamp)
{
	sync_kinect_t *kinect = (sync_kinect_t *)freenect_get_user(dev);
	producer_cb_inner(dev, data, timestamp, &kinect->depth, freenect_set_depth_buffer);
}
/* Pushes each incoming depth frame (640x480, 2 bytes per pixel) into the
 * owning Camera's image queue together with its device timestamp. */
void depth_cb(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
	Camera *cam = (Camera *)freenect_get_user(dev);
	cam->addImage((uint8_t *)v_depth, 640 * 480 * 2, timestamp);
}
Exemple #15
0
// Depth callback: forwards the raw 16-bit depth frame to the grabber's
// depth handler with the fixed frame dimensions.
static void kinect_depth_db(freenect_device *dev, void *v_depth, uint32_t timestamp)
{
    FreenectGrabber *g = reinterpret_cast<FreenectGrabber*>(freenect_get_user(dev));
    g->depthCallBack(reinterpret_cast<uint16_t*>(v_depth), FREENECT_FRAME_W, FREENECT_FRAME_H);
}
		// Static trampoline: recovers the C++ wrapper stored via
		// freenect_set_user() and forwards the video frame to it.
		static void freenect_video_callback(freenect_device *dev, void *video, uint32_t timestamp) {
			static_cast<FreenectDevice*>(freenect_get_user(dev))->VideoCallback(video, timestamp);
		}
		// Static trampoline: recovers the C++ wrapper stored via
		// freenect_set_user() and forwards the depth frame to it.
		static void freenect_depth_callback(freenect_device *dev, void *depth, uint32_t timestamp) {
			static_cast<FreenectDevice*>(freenect_get_user(dev))->DepthCallback(depth, timestamp);
		}
/* "open" method of the jit.freenect.grab Max object.
 * Starts the shared capture thread (and freenect context) on first use,
 * resolves which Kinect to open (explicit 1-based index from argv, or
 * first free device when no argument is given), opens it, configures
 * video/depth modes and callbacks, and starts both streams.
 * Fix: on freenect_open_device() failure the function previously fell
 * through and called freenect_set_*() on the NULL device — it now
 * returns immediately. */
void jit_freenect_grab_open(t_jit_freenect_grab *x,  t_symbol *s, long argc, t_atom *argv)
{
	int ndevices, devices_left, dev_ndx;
	t_jit_freenect_grab *y;
	freenect_device *dev;
	
	postNesa("opening device...\n");//TODO: remove
	
	if(x->device){
		error("A device is already open.");
		return;
	}
	x->is_open = FALSE;
	
	/* Lazily start the capture thread the first time any instance opens a
	 * device, then wait (bounded spin) for it to initialize f_ctx. */
	if(!f_ctx){
		postNesa("!f_ctx is null, opening a new device\n");//TODO: remove
		
		if (jit_freenect_restart_thread(x)!=MAX_ERR_NONE) {
			error("Failed to create capture thread.");
			return;
		}
		int bailout=0;
		while((!f_ctx)&&(++bailout<1000)){
			sleep(0); /* yield until the capture thread creates f_ctx */
		}
		if (!f_ctx)
		{
			error("Failed to init freenect after %i retries.\n",bailout);
			return;
		}
	}
	
	ndevices = freenect_num_devices(f_ctx);
	
	if(!ndevices){
		error("Could not find any connected Kinect device. Are you sure the power cord is plugged-in?");
		return;
	}
	
	/* Every open device sits on f_ctx's list; count how many remain free. */
	devices_left = ndevices;
	dev = f_ctx->first;
	while(dev){
		dev = dev->next;
		devices_left--;
	}
	
	if(!devices_left){
		error("All Kinect devices are currently in use.");
		return;
	}
	
	if(!argc){
		x->index = 0;	/* 0 = auto-select the first free device below */
	}
	else{
		/* Explicit 1-based device index requested: refuse if already in use. */
		x->index = jit_atom_getlong(argv);
		
		dev = f_ctx->first;
		while(dev){
			y = freenect_get_user(dev);
			if(y->index == x->index){
				error("Kinect device %d is already in use.", x->index);
				x->index = 0;
				return;
			}
			dev = dev->next;
		}
	}
	
	if(x->index > ndevices){
		error("Cannot open Kinect device %d, only %d are connected.", x->index, ndevices);
		x->index = 0;
		return;
	}
	
	//Find out which device to open: lowest index not claimed by another instance
	dev_ndx = x->index;
	if(!dev_ndx){
		int found = 0;
		while(!found){
			found = 1;
			dev = f_ctx->first;
			while(dev){
				y = freenect_get_user(dev);
				if(y->index-1 == dev_ndx){
					found = 0;
					break;
				}
				dev = dev->next;
			}
			dev_ndx++;
		}
		x->index = dev_ndx;
	}
	
	if (freenect_open_device(f_ctx, &(x->device), dev_ndx-1) < 0) {
		error("Could not open Kinect device %d", dev_ndx);
		x->index = 0;
		x->device = NULL;
		return; /* BUGFIX: must not fall through to the freenect_set_* calls
		           below with a NULL device */
	}
	postNesa("device open");//TODO: remove
	
	freenect_set_depth_callback(x->device, depth_callback);
	freenect_set_video_callback(x->device, rgb_callback);
	if(x->format.a_w.w_sym == s_ir){
		freenect_set_video_mode(x->device, freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_VIDEO_IR_8BIT));
	}
	else{
		freenect_set_video_mode(x->device, freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_VIDEO_RGB));
	}
	
	/* Depth mode: registered depth (aligned to the RGB image, values in mm)
	 * or raw 11-bit depth. */
	if (x->aligndepth==1)
	{
		postNesa("Depth is aligned to color");
		freenect_set_depth_mode(x->device, freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_REGISTERED));
	}
	else 
	{
		freenect_set_depth_mode(x->device, freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_11BIT));
	}
	
	//Store a pointer to this object in the freenect device struct (for use in callbacks)
	freenect_set_user(x->device, x);  
	
	freenect_set_led(x->device,LED_RED);
	
	freenect_start_depth(x->device);
	freenect_start_video(x->device);
	
	x->is_open = TRUE;
	open_device_count++;
	freenect_active=TRUE;
}
Exemple #19
0
/* Depth-frame callback for the "kingrid" terminal visualizer.
 * Splits the frame into divisions x divisions grid cells, gathers
 * per-cell stats (min/max/avg/approximate median via a small histogram,
 * out-of-range count), and renders them to stdout in one of three modes
 * (STATS, HISTOGRAM, ASCII art). Depth value 2047 means "out of range". */
void depth(freenect_device *kn_dev, void *depthbuf, uint32_t timestamp)
{
	struct kingrid_info *data = freenect_get_user(kn_dev);
	uint16_t *buf = (uint16_t *)depthbuf;
	/* Per-cell accumulators; VLAs sized by the runtime divisions count. */
	int small_histogram[data->divisions][data->divisions][SM_HIST_SIZE];
	int total[data->divisions][data->divisions];
	uint16_t min[data->divisions][data->divisions];
	uint16_t max[data->divisions][data->divisions];
	uint16_t median[data->divisions][data->divisions];
	float avg[data->divisions][data->divisions];
	int oor_count[data->divisions][data->divisions];
	int div_pix[data->divisions][data->divisions];
	int oor_total = 0; // Out of range count
	int i, j, medcount, histcount;

	// Initialize data structures
	memset(small_histogram, 0, sizeof(small_histogram));
	memset(total, 0, sizeof(total));
	memset(min, 0xff, sizeof(min)); /* all-ones == 0xffff, above any depth value */
	memset(max, 0, sizeof(max));
	memset(oor_count, 0, sizeof(oor_count));
	memset(div_pix, 0, sizeof(oor_count)); /* NOTE(review): sizeof(oor_count) — same size as div_pix here, but sizeof(div_pix) would be clearer/safer */

	// Fill in grid stats
	for(i = 0; i < FREENECT_FRAME_PIX; i++) {
		int gridx = PX_TO_GRIDX(i);
		int gridy = PX_TO_GRIDY(i);

		div_pix[gridy][gridx]++; // TODO: Calculate this only once
		if(buf[i] == 2047) {
			/* 2047 is the sensor's out-of-range sentinel: count and skip. */
			oor_count[gridy][gridx]++;
			oor_total++;
			continue;
		}

		/* Bucket the 11-bit value into SM_HIST_SIZE coarse bins (div by 1024
		 * maps 0..2046 onto roughly 0..2*SM_HIST_SIZE-1; assumes SM_HIST_SIZE
		 * sized accordingly — TODO confirm against its definition). */
		small_histogram[gridy][gridx][buf[i] * SM_HIST_SIZE / 1024]++;

		if(buf[i] < min[gridy][gridx]) {
			min[gridy][gridx] = buf[i];
		}
		if(buf[i] > max[gridy][gridx]) {
			max[gridy][gridx] = buf[i];
		}
		total[gridy][gridx] += buf[i];
	}

	// Calculate grid averages
	for(i = 0; i < data->divisions; i++) {
		for(j = 0; j < data->divisions; j++) {
			if(oor_count[i][j] < div_pix[i][j]) {
				/* Average over in-range pixels only. */
				avg[i][j] = (double)total[i][j] / (double)(div_pix[i][j] - oor_count[i][j]);

				// FIXME: Something is wrong with median calculation
				/* Walk the histogram until half the in-range pixels are seen,
				 * then convert the bin index back to a depth value. */
				for(medcount = 0, histcount = 0; histcount < SM_HIST_SIZE; histcount++) {
					medcount += small_histogram[i][j][histcount];
					if(medcount >= (div_pix[i][j] - oor_count[i][j]) / 2) {
						break;
					}
				}
				median[i][j] = (histcount * 1024 + (SM_HIST_SIZE / 2)) / SM_HIST_SIZE;
			} else {
				/* Whole cell out of range: report sentinel stats. */
				min[i][j] = 2047;
				max[i][j] = 2047;
				avg[i][j] = 2047;
				median[i][j] = 2047;
			}
		}
	}

	// Display grid stats
	printf("\e[H\e[2J"); /* ANSI: cursor home + clear screen */
	INFO_OUT("time: %u frame: %d out: %d%%\n", timestamp, data->frame, oor_total * 100 / FREENECT_FRAME_PIX);
	for(i = 0; i < data->divisions; i++) {
		if(data->disp_mode != ASCII) {
			grid_hline(data);
		}

		switch(data->disp_mode) {
			case STATS:
				// This would be an interesting use of lambdas to return the
				// value for a given column, allowing a "grid_row" function to
				// be produced:
				// grid_row("Pix %d", int lambda(int j) { return div_pix[i][j]; })

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Pix %d", div_pix[i][j]);
				}
				puts("|");

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Avg %f", lutf(data, avg[i][j]));
				}
				puts("|");

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Min %f", data->depth_lut[min[i][j]]);
				}
				puts("|");

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Med ~%f", data->depth_lut[median[i][j]]);
				}
				puts("|");

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Max %f", data->depth_lut[max[i][j]]);
				}
				puts("|");

				for(j = 0; j < data->divisions; j++) {
					grid_entry(data, "Out %d%%", oor_count[i][j] * 100 / div_pix[i][j]);
				}
				puts("|");
				break;

			case HISTOGRAM:
				for(histcount = 0; histcount < data->histrows; histcount++) {
					for(j = 0; j < data->divisions; j++) {
						int l, val = 0;
						if(i != i && i == 2 && j == 4 && histcount == 0) { // XXX : this block is for debugging and won't be entered
							printf("\n");
							for(l = 0; l < SM_HIST_SIZE; l++) {
								INFO_OUT("%d (%f): %d\n",
										l * 1024 / SM_HIST_SIZE,
										data->depth_lut[l * 1024 / SM_HIST_SIZE],
										small_histogram[i][j][l]);
							}
							printf("\n");
						}
						/* Sum adjacent bins so SM_HIST_SIZE bins fit histrows rows. */
						for(l = 0; l < SM_HIST_SIZE / data->histrows; l++) {
							val += small_histogram[i][j][histcount + l];
						}
						grid_bar(data, '*', val * 40 * data->histrows / div_pix[i][j]);
					}
					puts("|");
				}
				break;

			case ASCII:
				/* NOTE(review): this inner loop reuses the outer display
				 * loop's variable `i`, so after rendering the full frame the
				 * enclosing for-loop terminates. Works, but fragile — confirm
				 * intent before restructuring. */
				for(i = 0; i < data->divisions; i++) {
					for(j = 0; j < data->divisions; j++) {
						/* Map each cell's min depth (via LUT) onto 5 shades. */
						int c = (int)((data->depth_lut[min[i][j]] - data->zmin) * 
								4.0f / (data->zmax - data->zmin));
						if(c > 5) {
							c = 5;
						} else if(c < 0) {
						       c = 0;
						}
						if(min[i][j] == 2047) {
							c = 6;
						}

						// 1st character is closest, 5th character farthest
						// 6th character is shown for out-of-range areas
						putchar("8%+-._ "[c]);
					}
					putchar('\n');
				}
				break;
		}
	}
	if(data->disp_mode != ASCII) {
		grid_hline(data);
	}

	fflush(stdout);

	// Make LED red if more than 35% of the image is out of range (can't
	// set LED in callback for some reason)
	data->out_of_range = oor_total > FREENECT_FRAME_PIX * 35 / 100;

	data->frame++;
}
Exemple #20
0
// Static trampoline: recovers the KinectDriver instance from the device's
// user pointer and forwards the IR frame to its member handler.
void KinectDriver::irCbInternal (freenect_device *dev, freenect_pixel_ir *buf, uint32_t timestamp)
{
    reinterpret_cast<KinectDriver*>(freenect_get_user(dev))->irCb(dev, buf, timestamp);
}