// This function is called each time the DSP receives a new picture 
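// This first version is a skeleton: user vision-processing code goes in the marked spot below, after
// which the frame is copied by EDMA to the color LCD and, when Linux has requested one, to the shared
// image buffer in external memory.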
void userProcessColorImageFunc_laser(bgr *ptrImage) {
	

	if (ptrImage != NULL) {


		
		
		// add your vision processing code here
		
		
		
		
        // Send image to Color LCD if LCD ready for new data
		if (updateLCD) {  
			
			updateLCD = 0;
			
			// Invalidate the destination image area in external video memory so stale cache lines are not read back
			BCACHE_inv((void *)(ADDR_VIDEO_DATA_BASE+0x1A900),IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);
			// Write back (flush) the source image so the EDMA engine reads the up-to-date pixel data from memory
			BCACHE_wb ((void *)ptrImage,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);

			EDMA3_1_Regs->PARAMENTRY[33].OPT = 0x0011E00C;
			EDMA3_1_Regs->PARAMENTRY[33].SRC = (unsigned int)Image_data;
			EDMA3_1_Regs->PARAMENTRY[33].A_B_CNT = 0x004004A4;   // ACNT = 1188 bytes, BCNT = 64 (1188*64 = 176*144*3 bytes)
			EDMA3_1_Regs->PARAMENTRY[33].DST = (ADDR_VIDEO_DATA_BASE+LCD_IMAGE_OFFSET);
			EDMA3_1_Regs->PARAMENTRY[33].SRC_DST_BIDX = 0x04A404A4;
			EDMA3_1_Regs->PARAMENTRY[33].LINK_BCNTRLD = 0x0000FFFF;  // Null link
			EDMA3_1_Regs->PARAMENTRY[33].SRC_DST_CIDX = 0x0;
			EDMA3_1_Regs->PARAMENTRY[33].CCNT = 0x1;  // Writing CCNT last triggers the transfer
			
		}
	
		// If Linux is ready for another full 176x144 RGB image, start the EDMA transfer of the image to external memory
		if (GET_IMAGE_TO_LINUX) {

			// Invalidate Destination
			BCACHE_inv((void *)Linux_Image,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);
			// Flush or write back source
	        BCACHE_wb ((void *)ptrImage,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);

			EDMA3_1_Regs->PARAMENTRY[32].OPT = 0x0011F00C;				
			EDMA3_1_Regs->PARAMENTRY[32].SRC = (unsigned int)Image_data;			
			EDMA3_1_Regs->PARAMENTRY[32].A_B_CNT = 0x004004A4;   // ACNT = 1188 bytes, BCNT = 64 (1188*64 = 176*144*3 bytes)
			EDMA3_1_Regs->PARAMENTRY[32].DST = (ADDR_VIDEO_DATA_BASE+LINUX_IMAGE_OFFSET);
			EDMA3_1_Regs->PARAMENTRY[32].SRC_DST_BIDX = 0x04A404A4;
			EDMA3_1_Regs->PARAMENTRY[32].LINK_BCNTRLD = 0x0000FFFF;  // Null link
			EDMA3_1_Regs->PARAMENTRY[32].SRC_DST_CIDX = 0x0;										
			EDMA3_1_Regs->PARAMENTRY[32].CCNT = 0x1;  // Writing CCNT last triggers the transfer
			
		}
		
	}  // Ends if statement to see if image pointer is null				
	
}
Example #2
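// dprintf(): a printf-style function for the DSP side.  It formats the message into printf_buffer,
// writes the buffer back out of the cache, signals the GPP (the ARM/Linux side) through NOTIFY_notify
// with the buffer address, and then blocks on dprint_sema until the GPP acknowledges the message.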
void dprintf (const char * format, ...) 
{ 
  int n; 
  va_list args; 
  va_start (args, format); 
  n = vsprintf (printf_buffer,format, args); 
  va_end (args); 
  if (n<=0) return; 
  // writeback cache: 
  BCACHE_wb (printf_buffer, n, 1); 
  // notify GPP: 
  NOTIFY_notify (ID_GPP, 0, 6, (Uint32)printf_buffer); 
  // wait for GPP acknowledge 
  SEM_pendBinary (dprint_sema, SYS_FOREVER); 
} 
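// Example use (assumes printf_buffer, dprint_sema, and the matching GPP-side handler are set up
// elsewhere in the project):
//     dprintf("hello from the DSP\r\n");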
/**
* \brief   EDMA3 Cache Flush
*
*  This function flushes (cleans) the Cache
*
*  \param  mem_start_ptr [IN]      Starting address of memory. Please note that
*                                  this should be 128-byte aligned.
*  \param  num_bytes [IN]          length of buffer
* \return  nil return value
*/
void Edma3_CacheFlush(unsigned int mem_start_ptr,
                      unsigned int num_bytes)
{
  /* Verify whether the start address is 128-bytes aligned or not */
  if((mem_start_ptr & (0x7FU))    !=    0)
  {
#ifdef EDMA3_DRV_DEBUG
    EDMA3_DRV_PRINTF("\r\n Cache : Memory is not 128 bytes aligned\r\n");
#endif
  }
  
  BCACHE_wb ((void *)mem_start_ptr,
    num_bytes,
    EDMA3_CACHE_WAIT);
}
// This function is called each time the DSP receives a new picture 
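// This second version of the image-processing function is filled in: each frame it alternates between
// a blue and a green HSV target, thresholds the image in HSV, labels connected blobs using an
// equivalency table, finds the largest blob, draws a green cross at its centroid, and then starts the
// EDMA transfers of the image to the LCD and to the Linux shared buffer.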
void userProcessColorImageFunc_laser(bgr *ptrImage) {
	
	int i;	
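	// Select the HSV target for this frame: the blue golf ball when toggle is set, green otherwise.
	// specs_h2 is a second hue center offset by 256 so the |hue - center| test below also catches
	// hue values that wrap around the ends of the 0-255 hue scale.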
	if (toggle)
	{
		// seeing blue golf ball
		specs_h = 131;
		specs_hrad = 26;
		if((specs_h-specs_hrad)<0) // hue band wraps below 0 (hue is on a 0-255 scale here)
		{
			specs_h2=specs_h+256;
		}
		else // hue band may wrap above 255
		{
			specs_h2=specs_h-256;
		}
		specs_s = 91;
		specs_srad = 36;
		specs_v = 121;
		specs_vrad = 64;
	}
	else
	{
		// seeing green
		specs_h = 91;
		specs_hrad = 25;
		if((specs_h-specs_hrad)<0) // hue band wraps below 0 (hue is on a 0-255 scale here)
		{
			specs_h2=specs_h+256;
		}
		else // hue band may wrap above 255
		{
			specs_h2=specs_h-256;
		}
		specs_s = 157;
		specs_srad = 50;
		specs_v = 170;
		specs_vrad = 25;
	}

	if (ptrImage != NULL) {
		
		// Initialize all arrays for equivalency
		for (i=0; i < MAX_NUM_EQUIVALENCIES; i++) {
			equivalency_objects[i] = 0; // initialize link array
			object_stats[i].num_pixels_in_object = 0;
			object_stats[i].sum_r = 0;
			object_stats[i].sum_c = 0;
		}

		num_unique_objects = 0;
		
		object_detected = 0;
		
        // First pass through the image: convert RGB to HSV.  This code accounts for the fact that the robot's camera
        // only returns values between 16 and 240 for pixel intensity, and it applies a gain of 2 to the blue channel.
		for (r=0;r<IMAGE_ROWS;r++) {
			for(c=0;c<IMAGE_COLUMNS;c++) {
				
				red =  ((ptrImage[r*IMAGE_COLUMNS+c].red - 16)*255)/224;
				green = ((ptrImage[r*IMAGE_COLUMNS+c].green - 16)*255)/224;
				blue = ptrImage[r*IMAGE_COLUMNS+c].blue*2;

				if (blue > 240) {
					blue = 240;
				}
				blue = ((blue - 16)*255)/224;

				min = my_min( red, green, blue );
	 			value = my_max( red, green, blue );

			 	delta = value - min;
			 	if( value != 0 ) {
			 		sat = (delta*255) / value;		// s

			 		if (delta != 0) {
					 	if( red == value )
					 		hue = 60*( green - blue ) / delta;		// between yellow & magenta
					 	else if( green == value )
					 		hue = 120 + 60*( blue - red ) / delta;	// between cyan & yellow
					 	else
					 		hue = 240 + 60*( red - green ) / delta;	// between magenta & cyan
					 	if( hue < 0 )
					 		hue += 360;
					} else {
					 	hue = 0;
					 	sat = 0;
					}
			 	} else {
			 		// r = g = b = 0		// s = 0, v is undefined
			 		sat = 0;
			 		hue = 0;
			 	}

			 	hue = (hue*255)/360;	// rescale hue from 0-360 degrees to 0-255 to match the specs_h thresholds


				if ( (abs(sat-specs_s)<=specs_srad)
                  && (abs(value-specs_v)<=specs_vrad)
                  && (   (abs(hue-specs_h)<=specs_hrad)
                      || (abs(hue-specs_h2)<=specs_hrad) // catch the hue wraparound
                     )
                   ) { 			 	

					object_detected = 1;	// Set a flag that at least one pixel found above threshold
					
					// -------- Connectivity calculations ------------
					// Labels pixels 1 to MAX_NUM_EQUIVALENCIES depending on top and left neighbors
					// The labels represent object number...
					if (r == 0) top = 0;  else top = Thres_Image[(r-1)*IMAGE_COLUMNS+c];  // previous row, same column												
					if (c == 0) left = 0; else left = Thres_Image[r*IMAGE_COLUMNS+(c-1)];  // same row, previous column
					
					neighbor_type = 0;
					if (left != 0) neighbor_type += 1;
					if (top != 0)  neighbor_type += 2;
													
					current_object = 0;				
					switch (neighbor_type) {
						case 0: // Both neighbors zero, New object needed
                            if (num_unique_objects < (MAX_NUM_EQUIVALENCIES-1) ) {
				            	num_unique_objects++;
                                equivalency_objects[num_unique_objects] = num_unique_objects;
				            } else {
                                too_many_objects++;
                            }
							current_object = num_unique_objects;
							break;	
						case 1:	// Top is zero, left is not zero
							current_object = left;
							break;
						case 2:	// Left is zero, top is not zero
							current_object = top;
							break;
						case 3:	// Top and left are not zero... must note equivalency
							if (top == left) current_object = left;
							else {
								if (Check_Equivalency(top,left) == 0) {
									current_object = Set_Equivalency(top,left);																
								}
								else {
									current_object = left; 
								}
							}
							break;
						default: // Should NEVER enter here
							current_object = 0;  // Object 0 stores errors
							break;
					}
					Thres_Image[r*IMAGE_COLUMNS+c] = current_object;
					object_stats[current_object].num_pixels_in_object +=1;
					object_stats[current_object].sum_r += r;						
					object_stats[current_object].sum_c += c;						
					// ---------- Done with connectivity calculations (first pass) ----------
												
				} else {
					Thres_Image[r*IMAGE_COLUMNS+c] = 0;
				}
				
				
				
			 }
		}

		// initialize final object stats
		for (i=1; i<= MAX_NUM_OBJECTS; i++) {
			final_object_stats[i].sum_r = 0;
			final_object_stats[i].sum_c = 0;
			final_object_stats[i].num_pixels_in_object = 0;
			final_object_stats[i].center_r = 0.0;
			final_object_stats[i].center_c = 0.0;
			final_object_stats[i].C02_sum = 0.0;
			final_object_stats[i].C11_sum = 0.0;
			final_object_stats[i].C20_sum = 0.0;
			final_object_stats[i].theta = 0.0;
		}

		if (object_detected == 0) {
			num_unique_objects = 0;
		}
		else {
			num_unique_objects = Fix_Equivalency(num_unique_objects);// num_unique_objects contains the number of initial equivalencies found
		}
		
        if (num_unique_objects > MAX_NUM_OBJECTS) num_unique_objects = MAX_NUM_OBJECTS;


		// Third pass: correct image for nice display and calculate object moments
        // This is commented out because, for the 176x144 image, it adds a large amount of processing time when a large blob is found
//		for (r=0; r < IMAGE_ROWS; r++) {	// Loop over rows
//			for (c=0; c < IMAGE_COLUMNS; c++) {		// Loop over columns
//				if (Thres_Image[r*IMAGE_COLUMNS+c] > 0) {
//					// Fix pixel equivalency
//					Thres_Image[r*IMAGE_COLUMNS+c] = equivalency_objects[Thres_Image[r*IMAGE_COLUMNS+c]];
//
//					// Calculate second moments here
//					if ((Thres_Image[r*IMAGE_COLUMNS+c] > 0) && (Thres_Image[r*IMAGE_COLUMNS+c] <= MAX_NUM_OBJECTS)) {
//						final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].C02_sum += (c - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_c)*(c - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_c);
//						final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].C11_sum += (r - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_r)*(c - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_c);
//						final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].C20_sum += (r - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_r)*(r - final_object_stats[Thres_Image[r*IMAGE_COLUMNS+c]].center_r);
//					}
//
//				}
//			}
//		}

		// Find the largest object (the orientation calculation below is commented out)
		largest_num_pixels = 0;
		largest_object = 1;
		for (k = 1; k <= num_unique_objects ; k++) {				
			if (final_object_stats[k].num_pixels_in_object > largest_num_pixels) {
				largest_num_pixels = final_object_stats[k].num_pixels_in_object;
				largest_object = k;
			}	
			  // find theta of the found blob
			  // commented out because the second moments must be calculated above before this code can be used
//            if (final_object_stats[k].num_pixels_in_object > NUMPIXELS_TO_CALC_ANGLE) {
//                // Calculate the object orientation angle if there are NUMPIXELS_TO_CALC_ANGLE in the object
//                Cdifference = final_object_stats[k].C20_sum - final_object_stats[k].C02_sum;
//                if (Cdifference != 0.0F) { // can't divide by zero
//                    final_object_stats[k].theta = atansp(final_object_stats[k].C11_sum/Cdifference)/2.0F;
//                } else {
//                    final_object_stats[k].theta = 0.0;
//                }
//                if (final_object_stats[k].C20_sum > final_object_stats[k].C02_sum) {
//                    if (final_object_stats[k].theta < 0) final_object_stats[k].theta += PI/2.0F;
//                    else final_object_stats[k].theta += -PI/2.0F;
//                }
//            } else {
//                final_object_stats[k].theta = 0;
//            }
            
		} // Ends loop through objects
		
		// Store the centroid of the largest blob for the color currently being tracked, then switch colors for the next frame
		if (new_vision_data == 0)
		{
			if (toggle)
			{
				blue_rbar = (int) (final_object_stats[largest_object].center_r);
				blue_cbar = (int) (final_object_stats[largest_object].center_c);
				blueNumPixels = (int) final_object_stats[largest_object].num_pixels_in_object;
				toggle = 0;
			}
			else
			{
				green_rbar = (int) (final_object_stats[largest_object].center_r);
				green_cbar = (int) (final_object_stats[largest_object].center_c);
				greenNumPixels = (int) final_object_stats[largest_object].num_pixels_in_object;
				toggle = 1;
			}

			new_vision_data = 1;
		}

		// pass data to RobotControl()
		if (new_coordata == 0) {
			if (final_object_stats[largest_object].num_pixels_in_object > 1) {
				noimagefound = 0;
				new_num_found_objects = num_unique_objects;
				object_x = cbar - IMAGE_COLUMNS/2;
				object_y = rbar - IMAGE_ROWS/2;
				numpels = final_object_stats[largest_object].num_pixels_in_object;
				//new_object_theta 	= final_object_stats[largest_object].theta;
				//new_C20				= final_object_stats[largest_object].C20_sum;
				//new_C02				= final_object_stats[largest_object].C02_sum;
				new_coordata = 1;
			} else {
				noimagefound = 1;
				new_num_found_objects = num_unique_objects;
				object_x = 0.0;
				object_y = 0.0;
				numpels = 0;
				//new_object_theta = 0.0;
				//new_C20 = 0.0;
				//new_C02 = 0.0;
				new_coordata = 1;
			}
		}

        // Draw a green cross at (rbar, cbar), the centroid of the largest object, if the object is large enough
        if (final_object_stats[largest_object].num_pixels_in_object > 6) {

            ptrImage[rbar*IMAGE_COLUMNS+cbar].red = 0;
            ptrImage[rbar*IMAGE_COLUMNS+cbar].blue = 0;
            ptrImage[rbar*IMAGE_COLUMNS+cbar].green = 255;
            if (rbar > 0) {
                ptrImage[(rbar-1)*IMAGE_COLUMNS+cbar].red =  0;
                ptrImage[(rbar-1)*IMAGE_COLUMNS+cbar].blue = 0;
                ptrImage[(rbar-1)*IMAGE_COLUMNS+cbar].green = 255;
            }
            if (rbar < (IMAGE_ROWS-1)) {
                ptrImage[(rbar+1)*IMAGE_COLUMNS+cbar].red = 0;
                ptrImage[(rbar+1)*IMAGE_COLUMNS+cbar].blue = 0;
                ptrImage[(rbar+1)*IMAGE_COLUMNS+cbar].green = 255;
            }
            if (cbar > 0) {
                ptrImage[rbar*IMAGE_COLUMNS+(cbar-1)].red = 0;
                ptrImage[rbar*IMAGE_COLUMNS+(cbar-1)].blue = 0;
                ptrImage[rbar*IMAGE_COLUMNS+(cbar-1)].green = 255;
            }
            if (cbar < (IMAGE_COLUMNS-1)) {
                ptrImage[rbar*IMAGE_COLUMNS+(cbar+1)].red = 0;
                ptrImage[rbar*IMAGE_COLUMNS+(cbar+1)].blue = 0;
                ptrImage[rbar*IMAGE_COLUMNS+(cbar+1)].green = 255;
            }
        }


        if ( 12 == switchstate) {
            UpdateLCDwithLADAR(ptrImage,1);
        }

        // Send image to Color LCD if LCD ready for new data
		if (updateLCD) {

			updateLCD = 0;

			// Invalidate the destination image area in external video memory so stale cache lines are not read back
			BCACHE_inv((void *)(ADDR_VIDEO_DATA_BASE+0x1A900),IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);
			// Write back (flush) the source image so the EDMA engine reads the up-to-date pixel data from memory
			BCACHE_wb ((void *)ptrImage,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);

			EDMA3_1_Regs->PARAMENTRY[33].OPT = 0x0011E00C;
			EDMA3_1_Regs->PARAMENTRY[33].SRC = (unsigned int)Image_data;
			EDMA3_1_Regs->PARAMENTRY[33].A_B_CNT = 0x004004A4;   // ACNT = 1188 bytes, BCNT = 64 (1188*64 = 176*144*3 bytes)
			EDMA3_1_Regs->PARAMENTRY[33].DST = (ADDR_VIDEO_DATA_BASE+LCD_IMAGE_OFFSET);
			EDMA3_1_Regs->PARAMENTRY[33].SRC_DST_BIDX = 0x04A404A4;
			EDMA3_1_Regs->PARAMENTRY[33].LINK_BCNTRLD = 0x0000FFFF;  // Null link
			EDMA3_1_Regs->PARAMENTRY[33].SRC_DST_CIDX = 0x0;
			EDMA3_1_Regs->PARAMENTRY[33].CCNT = 0x1;  // Writing CCNT last triggers the transfer

		}

		// If Linux is ready for another full 176x144 RGB image, start the EDMA transfer of the image to external memory
		if (GET_IMAGE_TO_LINUX) {

			// Invalidate Destination
			BCACHE_inv((void *)Linux_Image,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);
			// Flush or write back source
	        BCACHE_wb ((void *)ptrImage,IMAGE_ROWS*IMAGE_COLUMNS*3,EDMA3_CACHE_WAIT);

			EDMA3_1_Regs->PARAMENTRY[32].OPT = 0x0011F00C;
			EDMA3_1_Regs->PARAMENTRY[32].SRC = (unsigned int)Image_data;
			EDMA3_1_Regs->PARAMENTRY[32].A_B_CNT = 0x004004A4;   // ACNT = 1188 bytes, BCNT = 64 (1188*64 = 176*144*3 bytes)
			EDMA3_1_Regs->PARAMENTRY[32].DST = (ADDR_VIDEO_DATA_BASE+LINUX_IMAGE_OFFSET);
			EDMA3_1_Regs->PARAMENTRY[32].SRC_DST_BIDX = 0x04A404A4;
			EDMA3_1_Regs->PARAMENTRY[32].LINK_BCNTRLD = 0x0000FFFF;  // Null link
			EDMA3_1_Regs->PARAMENTRY[32].SRC_DST_CIDX = 0x0;
			EDMA3_1_Regs->PARAMENTRY[32].CCNT = 0x1;  // Writing CCNT last triggers the transfer

		}
		
	}  // Ends if statement to see if image pointer is null				
	
}
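// ComWithLinux(): a DSP/BIOS task that exchanges data with the Linux (ARM) side through the cached
// shared-memory structure pointed to by ptrshrdmem.  Each pass it invalidates its cached copy of the
// structure, services whichever handshake flags Linux has set (new vref/turn commands, a string from
// the Linux application, or requests for data to send back), writes its responses back out of the
// cache, clears the corresponding flag, and then sleeps for 40 system ticks.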
void ComWithLinux(void) {

	int i = 0;
	TSK_sleep(100);

	while(1) {

		// Invalidate the cached copy of the shared structure so flags and data written by Linux are visible
		BCACHE_inv((void *)ptrshrdmem,sizeof(sharedmemstruct),EDMA3_CACHE_WAIT);
		
		if (GET_DATA_FROM_LINUX) {

			if (newnavdata == 0) {
				newvref = ptrshrdmem->Floats_to_DSP[0];
				newturn = ptrshrdmem->Floats_to_DSP[1];
				newnavdata = 1;
			}

			CLR_DATA_FROM_LINUX;

		}

		if (GET_LVDATA_FROM_LINUX) {

			// Clamp the received size to the buffer length, copy the string out of shared memory, then parse it
			if (ptrshrdmem->DSPRec_size > 256) ptrshrdmem->DSPRec_size = 256;
			for (i=0;i<ptrshrdmem->DSPRec_size;i++) {
				fromLinuxstring[i] = ptrshrdmem->DSPRec_buf[i];
			}
			fromLinuxstring[i] = '\0';

			if (new_LV_data == 0) {
				sscanf(fromLinuxstring,"%f%f",&LVvalue1,&LVvalue2);
				new_LV_data = 1;
			}

			CLR_LVDATA_FROM_LINUX;

		}

		if ((tskcount%6)==0) {  // only send the string to Linux every 6th pass through the loop
			if (GET_LVDATA_TO_LINUX) {

				// Default placeholder string
				ptrshrdmem->DSPSend_size = sprintf(toLinuxstring,"1.0 1.0 1.0 1.0");
				// In practice you would format your own variables, for example:
				//ptrshrdmem->DSPSend_size = sprintf(toLinuxstring,"%.1f %.1f %.1f %.1f",var1,var2,var3,var4);

				for (i=0;i<ptrshrdmem->DSPSend_size;i++) {
					ptrshrdmem->DSPSend_buf[i] = toLinuxstring[i];
				}

				// Flush or write back source
				BCACHE_wb((void *)ptrshrdmem,sizeof(sharedmemstruct),EDMA3_CACHE_WAIT);

				CLR_LVDATA_TO_LINUX;

			}
		}
		
		if (GET_DATAFORFILE_TO_LINUX) {
			// First make sure all scratch elements are zero
			for (i=0;i<500;i++) {
				ptrshrdmem->scratch[i] = 0;
			}
			// Write LADARdataX to scratch
			for (i=0;i<228;i++) {
				ptrshrdmem->scratch[i] = LADARdataX[i];
			}
			// Write LADARdataY to scratch
			for (i=0;i<228;i++) {
				ptrshrdmem->scratch[228+i] = LADARdataY[i];
			}
			// Flush or write back source
			BCACHE_wb((void *)ptrshrdmem,sizeof(sharedmemstruct),EDMA3_CACHE_WAIT);

			CLR_DATAFORFILE_TO_LINUX;
		}


		tskcount++;
		TSK_sleep(40);
	}


}