Example #1
//------------------------------------------------------------------------------
/// @brief Applies the currently selected FastCV filter to the source image
/// @param pSrc Pointer to source image data
/// @param w    Width of the image
/// @param h    Height of the image
/// @param pDst Pointer to destination image data
//------------------------------------------------------------------------------
void updateFilter( uint8_t *pSrc, uint32_t w, uint32_t h, uint8_t *pDst )
{
	//Handles filter selection
	switch( filterState.filterType )
	{
		case ENABLE_GAUSS_3X3:
			fcvFilterGaussian3x3u8( pSrc, w, h, pDst, 0);
			break;
		case ENABLE_GAUSS_5X5:
			fcvFilterGaussian5x5u8( pSrc, w, h, pDst, 0);
			break;
		case ENABLE_GAUSS_11X11:
			fcvFilterGaussian11x11u8( pSrc, w, h, pDst, 0 );
			break;
		case ENABLE_CANNY:
			// Allocate the edge image buffer once and reuse it across frames
			if( filterState.edgeImgBuf == NULL )
			{
				filterState.edgeImgWidth = w;
				filterState.edgeImgHeight = h;
				filterState.edgeImgBuf = (uint8_t*)fcvMemAlloc( filterState.edgeImgWidth*filterState.edgeImgHeight, 16 );
			}
			fcvFilterCanny3x3u8( pSrc, filterState.edgeImgWidth, filterState.edgeImgHeight, filterState.edgeImgBuf, 10, 20 );
			// Preview shows the unmodified source; the edge map is drawn separately
			memcpy( pDst, pSrc, w*h );
			break;
		case ENABLE_ERODE:
			fcvFilterErode3x3u8( pSrc, w, h, pDst );
			break;
		case ENABLE_DILATE:
			fcvFilterDilate3x3u8( pSrc, w, h, pDst );
			break;
		case ENABLE_MEDIAN:
			fcvFilterMedian3x3u8( pSrc, w, h, pDst );
			break;
		case ENABLE_SOBEL:
			fcvFilterSobel3x3u8( pSrc, w, h, pDst );
			break;
		case ENABLE_THRESHOLD:
			fcvFilterThresholdu8( pSrc, w, h, pDst, 128 );
			break;
		case RESET:
			memcpy(pDst, pSrc, w*h);
			break;
		default:
			break;
	}
}
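
The ENABLE_CANNY case above allocates filterState.edgeImgBuf once and then reuses it on every frame, but this snippet never shows it being released. A minimal teardown sketch, assuming a cleanup hook runs when the filter demo is destroyed (the function name cleanupFilter is hypothetical and not part of the sample):

//------------------------------------------------------------------------------
/// @brief Hypothetical teardown helper: releases the edge image buffer
///        allocated by the ENABLE_CANNY case above
//------------------------------------------------------------------------------
void cleanupFilter()
{
	if( filterState.edgeImgBuf != NULL )
	{
		fcvMemFree( filterState.edgeImgBuf );   // matches the fcvMemAlloc above
		filterState.edgeImgBuf = NULL;
		filterState.edgeImgWidth = 0;
		filterState.edgeImgHeight = 0;
	}
}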
Example #2
//------------------------------------------------------------------------------
/// @brief Returns the shared RGB565 render buffer, (re)allocating it when the
///        requested dimensions change
/// @param w Width of the render buffer in pixels
/// @param h Height of the render buffer in pixels
/// @return Pointer to the 16-byte-aligned RGB565 render buffer
//------------------------------------------------------------------------------
uint8_t* getRenderBuffer( uint32_t w, uint32_t h )
{
   lockRenderBuffer();

   // Resize if necessary.
   if( w != state.renderBufWidth || h != state.renderBufHeight )
   {
      if( state.renderBufRGB565 != NULL )
      {
         fcvMemFree( state.renderBufRGB565 );
         state.renderBufRGB565 = NULL;
         state.renderBufSize = 0;
         state.renderBufWidth = 0;
         state.renderBufHeight = 0;
      }
   }

   // Allocate if necessary.
   if( state.renderBufRGB565 == NULL )
   {
      state.renderBufSize = w * h * 2;
      state.renderBufRGB565 = (uint8_t*) fcvMemAlloc(state.renderBufSize, 16);
      state.renderBufWidth = w;
      state.renderBufHeight = h;
      
      glEnable(GL_TEXTURE_2D);
      glEnable(GL_BLEND);
      glDisable(GL_DITHER);
      glDisable(GL_DEPTH_TEST);
      glBlendFunc(GL_ONE, GL_SRC_COLOR);

      // Initialize mutex
      pthread_mutexattr_t  attribute;
      pthread_mutexattr_init( &attribute );
      pthread_mutexattr_settype( &attribute, PTHREAD_MUTEX_NORMAL );
      pthread_mutex_init( &state.mutex, &attribute );
      pthread_mutexattr_destroy( &attribute );
   }

   unlockRenderBuffer();

   return state.renderBufRGB565;
}
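
lockRenderBuffer() and unlockRenderBuffer() are used throughout these examples but are not shown. A plausible sketch, assuming they simply wrap the pthread mutex initialized in getRenderBuffer() and that state.mutex is valid before the first lock; this is an assumption about the helpers, not the sample's actual implementation:

//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
void lockRenderBuffer()
{
   pthread_mutex_lock( &state.mutex );     // block until the render buffer is free
}

//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
void unlockRenderBuffer()
{
   pthread_mutex_unlock( &state.mutex );   // let other threads touch the buffer
}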
Example #3
//------------------------------------------------------------------------------
/// @brief JNI update hook for the Affine demo: applies the selected affine
///        transformation to the camera frame and converts the result to
///        RGB565 for rendering
//------------------------------------------------------------------------------
JNIEXPORT void JNICALL Java_com_qualcomm_fastcvdemo_apis_imageTransformation_Affine_update
(
   JNIEnv*     env,
   jobject     obj,
   jbyteArray  img,
   jint        w,
   jint        h
)
{
	jbyte*            jimgData = NULL;
	jboolean          isCopy = 0;
	uint32_t*         curCornerPtr = 0;
	uint8_t*          renderBuffer;
	uint64_t          time;
	float             timeMs;

	// Allocate the buffer once here if it's not allocated already
	if( affineState.affineImgBuf == NULL)
	{
		int frameSize = w*h*3/2;
		affineState.affineImgBuf = (uint8_t *)fcvMemAlloc(frameSize, 16);
		if( affineState.affineImgBuf == NULL )
		{
			EPRINTF("Allocate affineImgBuf failed");
		}
		else
		{
			memset(affineState.affineImgBuf, 0, w*h);
			memset(affineState.affineImgBuf+(w*h), 128, w*h/2);
		}
	}

	// Get data from JNI
	jimgData = env->GetByteArrayElements( img, &isCopy );

	renderBuffer = getRenderBuffer( w, h );

	lockRenderBuffer();

	time = util.getTimeMicroSeconds();

	uint8_t* pJimgData    = (uint8_t*)jimgData;

	// Check if camera image data is not aligned.
	if( (uintptr_t)jimgData & 0xF )
	{
		// Allow for rescale if dimensions changed.
		if( w != (int)affineState.alignedImgWidth ||
			h != (int)affineState.alignedImgHeight )
		{
			if( affineState.alignedImgBuf != NULL )
			{
				DPRINTF( "%s %d Creating aligned for preview\n",
						__FILE__, __LINE__ );
				fcvMemFree( affineState.alignedImgBuf );
				affineState.alignedImgBuf = NULL;
			}
		}

		// Allocate buffer for aligned data if necessary.
		if( affineState.alignedImgBuf == NULL )
		{
			affineState.alignedImgWidth = w;
			affineState.alignedImgHeight = h;
			affineState.alignedImgBuf = (uint8_t*)fcvMemAlloc( w*h*3/2, 16 );
		}

		memcpy( affineState.alignedImgBuf, jimgData, w*h*3/2 );
		pJimgData = affineState.alignedImgBuf;
	}

	// Perform FastCV Function processing
	switch( affineState.affineType )
	{
		case AFFINE_U8:
		case AFFINE_8X8:
			// Both affine modes use the same call path here; the mode-specific
			// work happens inside updateAffine()
			updateAffine( (uint8_t*)pJimgData, w, h, affineState.affineImgBuf );
			colorConvertYUV420ToRGB565Renderer( affineState.affineImgBuf, w, h, (uint32_t*)renderBuffer );
			break;

		case NO_AFFINE:
		default:
			colorConvertYUV420ToRGB565Renderer(pJimgData, w, h, (uint32_t*)renderBuffer );
			break;
	}

	// Update the running average of per-frame processing time (~30-frame window)
	timeMs = ( util.getTimeMicroSeconds() - time ) / 1000.f;
	util.setProcessTime((util.getProcessTime()*(29.f/30.f))+(float)(timeMs/30.f));

	unlockRenderBuffer();

	// Let JNI know we don't need data anymore
	env->ReleaseByteArrayElements( img, jimgData, JNI_ABORT );
}
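
The buffers above are sized as w*h*3/2 bytes because the camera frames are 8-bit YUV 4:2:0: a full-resolution luma plane (cleared to 0 above) followed by half as many chroma bytes (set to 128, i.e. neutral color). A small illustrative helper capturing that arithmetic; the helper itself is not part of the sample:

// Frame size of an 8-bit YUV 4:2:0 image: w*h luma bytes plus w*h/2 chroma bytes.
static inline uint32_t yuv420FrameSize( uint32_t w, uint32_t h )
{
   return w * h * 3 / 2;
}

For example, a 640x480 preview frame needs 640*480*3/2 = 460800 bytes.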
Example #4
//------------------------------------------------------------------------------
/// @brief JNI update hook for the Filter demo: applies the selected FastCV
///        filter to the camera frame and converts the result to RGB565 for
///        rendering
//------------------------------------------------------------------------------
JNIEXPORT void JNICALL Java_com_qualcomm_fastcvdemo_apis_imageProcessing_Filter_update
(
   JNIEnv*     env,
   jobject     obj,
   jbyteArray  img,
   jint        w,
   jint        h
)
{
   jbyte*            jimgData = NULL;
   jboolean          isCopy = 0;
   uint32_t*         curCornerPtr = 0;
   uint8_t*          renderBuffer;
   uint64_t          time;
   float             timeMs;

   // Allocate the buffer once here if it's not allocated already
   if( filterState.filteredImgBuf == NULL)
   {
      int frameSize = w*h*3/2;
      filterState.filteredImgBuf = (uint8_t *)fcvMemAlloc(frameSize, 16);
      if( filterState.filteredImgBuf == NULL )
      {
         EPRINTF("Allocate filteredImgBuf failed");
      }
      else
      {
         memset(filterState.filteredImgBuf, 128, frameSize);
      }
   }

   // Get data from JNI
   jimgData = env->GetByteArrayElements( img, &isCopy );

   renderBuffer = getRenderBuffer( w, h );

   lockRenderBuffer();

   time = util.getTimeMicroSeconds();

   // jimgData might not be 128-bit aligned.
   // fcvColorYUV420toRGB565u8() and the other FastCV functions used below
   // require 128-bit aligned memory. If jimgData is not 128-bit aligned,
   // allocate a 128-bit aligned buffer and copy jimgData into it.

   uint8_t* pJimgData    = (uint8_t*)jimgData;
   uint8_t* pFilteringData = (uint8_t*)filterState.filteredImgBuf;

   // Check if camera image data is not aligned.
   if( (uintptr_t)jimgData & 0xF )
   {
      // Allow for rescale if dimensions changed.
      if( w != (int)filterState.imgWidth ||
          h != (int)filterState.imgHeight )
      {
         if( filterState.alignedImgBuf != NULL )
         {
            DPRINTF( "%s %d Creating aligned for preview\n",
               __FILE__, __LINE__ );
            fcvMemFree( filterState.alignedImgBuf );
            filterState.alignedImgBuf = NULL;
         }
      }

      // Allocate buffer for aligned data if necessary.
      if( filterState.alignedImgBuf == NULL )
      {
         filterState.imgWidth = w;
         filterState.imgHeight = h;
         filterState.alignedImgBuf = (uint8_t*)fcvMemAlloc( w*h*3/2, 16 );
      }

      memcpy( filterState.alignedImgBuf, jimgData, w*h*3/2 );
      pJimgData = filterState.alignedImgBuf;
   }
   else if( w != (int)filterState.imgWidth ||
            h != (int)filterState.imgHeight )
   {
      filterState.imgWidth = w;
      filterState.imgHeight = h;
   }

   // Perform FastCV Function processing
   updateFilter( (uint8_t*)pJimgData, w, h, (uint8_t*)pFilteringData );

   // Copy the image into our own buffer first to avoid corruption during
   // rendering. Note that the image can still be corrupted while we copy,
   // but we can't help that.

   colorConvertYUV420ToRGB565Renderer(pFilteringData,
                            w,
                            h,
                            (uint32_t*)renderBuffer );

   // Update the running average of per-frame processing time (~30-frame window)
   timeMs = ( util.getTimeMicroSeconds() - time ) / 1000.f;
   util.setProcessTime((util.getProcessTime()*(29.f/30.f))+(float)(timeMs/30.f));

   if( filterState.filterType == ENABLE_CANNY )
   {
      drawEdges( filterState.edgeImgBuf, filterState.edgeImgHeight, filterState.edgeImgWidth );
   }

   unlockRenderBuffer();

   // Let JNI know we don't need data anymore
   env->ReleaseByteArrayElements( img, jimgData, JNI_ABORT );
}
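
Each update function decides whether the camera frame needs to be copied by testing the low four bits of the jimgData address, i.e. whether the pointer is 16-byte (128-bit) aligned as FastCV requires. As an illustrative sketch (the helper name is an assumption, not part of the sample), that test could be factored out:

// True when ptr is not 16-byte (128-bit) aligned and therefore must be copied
// into an fcvMemAlloc'd buffer before being passed to FastCV functions.
static inline bool isUnaligned128( const void* ptr )
{
   return ( (uintptr_t)ptr & 0xF ) != 0;
}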
Example #5
//------------------------------------------------------------------------------
/// @brief JNI update hook for the ImgDiff demo: diffs the camera frame against
///        a stored reference frame and converts the result to RGB565 for
///        rendering
//------------------------------------------------------------------------------
JNIEXPORT void JNICALL
	Java_com_qualcomm_fastcvdemo_apis_imageProcessing_ImgDiff_update
(
   JNIEnv*     env,
   jobject     obj,
   jbyteArray  img,
   jint        w,
   jint        h
)
{
   jbyte*            jimgData = NULL;
   jboolean          isCopy = 0;
   uint32_t*         curCornerPtr = 0;
   uint8_t*          renderBuffer;
   uint64_t          time;
   float             timeMs;


   // Get data from JNI
   jimgData = env->GetByteArrayElements( img, &isCopy );

   DPRINTF("ImgDiff: update");

   renderBuffer = getRenderBuffer( w, h );

   lockRenderBuffer();

   time = util.getTimeMicroSeconds();

   // Allocate the buffer once here if it's not allocated already
   if( imgDiffState.referenceImgBuf == NULL)
   {
      imgDiffState.referenceImgBuf = (uint8_t *)fcvMemAlloc(w*h*3/2, 16);
      if( imgDiffState.referenceImgBuf == NULL )
      {
         EPRINTF("Allocate referenceImgBuf failed");
      }
   }

   // Allocate the buffer once here if it's not allocated already
   if( imgDiffState.diffImgBuf == NULL)
   {
      int frameSize = w*h*3/2;
      imgDiffState.diffImgBuf = (uint8_t *)fcvMemAlloc(frameSize, 16);
      if( imgDiffState.diffImgBuf == NULL )
      {
         EPRINTF("Allocate diffImgBuf failed");
      }
      else
      {
         memset(imgDiffState.diffImgBuf, 0, w*h);
         memset(imgDiffState.diffImgBuf+(w*h), 128, w*h/2);
      }
   }

   uint8_t* pJimgData    = (uint8_t*)jimgData;
   uint8_t* pDiffData    = (uint8_t*)imgDiffState.diffImgBuf;

   // jimgData might not be 128-bit aligned.
   // fcvColorYUV420toRGB565u8() and the other FastCV functions used below
   // require 128-bit aligned memory. If jimgData is not 128-bit aligned,
   // allocate a 128-bit aligned buffer and copy jimgData into it.

   // Check if camera image data is not aligned.
   if( (uintptr_t)jimgData & 0xF )
   {
      // Allow for rescale if dimensions changed.
      if( w != (int)imgDiffState.alignedImgWidth ||
          h != (int)imgDiffState.alignedImgHeight )
      {
         if( imgDiffState.alignedImgBuf != NULL )
         {
            DPRINTF( "%s %d Creating aligned for preview\n",
               __FILE__, __LINE__ );
            fcvMemFree( imgDiffState.alignedImgBuf );
            imgDiffState.alignedImgBuf = NULL;
         }
      }

      // Allocate buffer for aligned data if necessary.
      if( imgDiffState.alignedImgBuf == NULL )
      {
         imgDiffState.alignedImgWidth = w;
         imgDiffState.alignedImgHeight = h;
         imgDiffState.alignedImgBuf = (uint8_t*)fcvMemAlloc( w*h*3/2, 16 );
      }

      memcpy( imgDiffState.alignedImgBuf, jimgData, w*h*3/2 );
      pJimgData = imgDiffState.alignedImgBuf;
   }

   uint8_t* pPreviewData = pJimgData;

   // Handle the reference-frame state machine
   switch( imgDiffState.diffState )
   {
      case NO_REF_FRAME:
         break;

      case TAKE_REF_FRAME:
         DPRINTF("In take frame");
         memcpy(imgDiffState.referenceImgBuf, pPreviewData, w*h);
         imgDiffState.diffState = HAS_REF_FRAME;
         DPRINTF("ImgDiff: taken frame");
         break;

      case HAS_REF_FRAME:
         updateDiff( imgDiffState.referenceImgBuf, pJimgData, w, h, pDiffData );
         pPreviewData = pDiffData;
         break;

      case NEED_RESET:
         imgDiffState.diffState = NO_REF_FRAME;
         break;

      default:
         break; 
   }

   colorConvertYUV420ToRGB565Renderer(pPreviewData,
                            w,
                            h,
                            (uint32_t*)renderBuffer );

   // Update the running average of per-frame processing time (~30-frame window)
   timeMs = ( util.getTimeMicroSeconds() - time ) / 1000.f;
   util.setProcessTime((util.getProcessTime()*(29.f/30.f))+(float)(timeMs/30.f));

   unlockRenderBuffer();

   // Let JNI know we don't need data anymore
   env->ReleaseByteArrayElements( img, jimgData, JNI_ABORT );
}
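
The switch above expects imgDiffState.diffState to be set to TAKE_REF_FRAME before a reference frame can be captured, but the trigger is not part of this snippet. A hypothetical JNI hook that the Java UI could call to request it (the method name and signature are assumptions, not the sample's actual API):

//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
JNIEXPORT void JNICALL
	Java_com_qualcomm_fastcvdemo_apis_imageProcessing_ImgDiff_takeReferenceFrame
(
   JNIEnv*  env,
   jobject  obj
)
{
   // On the next update() call, TAKE_REF_FRAME copies the incoming luma plane
   // into referenceImgBuf and advances the state to HAS_REF_FRAME.
   imgDiffState.diffState = TAKE_REF_FRAME;
}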