/*!***********************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_SURF_Compute_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image) parameters[0];
	vx_image mask = (vx_image) parameters[1];
	vx_array array = (vx_array) parameters[2];
	vx_array DESP = (vx_array) parameters[3];
	vx_scalar hessianThreshold = (vx_scalar) parameters[4];
	vx_scalar nOctaves = (vx_scalar) parameters[5];
	vx_scalar nOctaveLayers = (vx_scalar) parameters[6];
	vx_scalar EXTENDED = (vx_scalar) parameters[7];
	vx_scalar UPRIGHT = (vx_scalar) parameters[8];

	Mat *mat, *mask_mat;
	vx_float32 FloatValue = 0;
	vx_int32 value = 0;
	vx_bool extend, upright, value_b;
	float HessianThreshold;
	int NOctaves, NOctaveLayers;

	//Extracting Values from the Scalar 
	STATUS_ERROR_CHECK(vxReadScalarValue(hessianThreshold, &FloatValue)); HessianThreshold = FloatValue;
	STATUS_ERROR_CHECK(vxReadScalarValue(nOctaves, &value)); NOctaves = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(nOctaveLayers, &value)); NOctaveLayers = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(EXTENDED, &value_b)); extend = value_b;
	STATUS_ERROR_CHECK(vxReadScalarValue(UPRIGHT, &value_b)); upright = value_b;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mask_mat, mask));

	//Compute using OpenCV
	bool extended_B = (extend == vx_true_e);
	bool upright_b = (upright == vx_true_e);
	vector<KeyPoint> key_points;
	Mat Desp;
	Ptr<Feature2D> surf = xfeatures2d::SURF::create(HessianThreshold, NOctaves, NOctaveLayers, extended_B, upright_b);
	surf->detectAndCompute(*mat, *mask_mat, key_points, Desp);

	//Converting OpenCV Keypoints to OpenVX Keypoints
	STATUS_ERROR_CHECK(CV_to_VX_keypoints(key_points, array));

	//Extended SURF produces 128-float descriptors, basic SURF 64-float descriptors
	if (extend == vx_true_e)
		STATUS_ERROR_CHECK(CV_DESP_to_VX_DESP(Desp, DESP, 512));
	else
		STATUS_ERROR_CHECK(CV_DESP_to_VX_DESP(Desp, DESP, 256));

	return status;
}
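/*!***********************************************************************************************************
Illustrative sketch, not part of the original module: the 512/256 lengths passed to CV_DESP_to_VX_DESP
above are assumed to be byte counts for SURF's 128-float extended and 64-float basic descriptors. If that
assumption holds, a helper like this could replace the magic numbers.
*************************************************************************************************************/
static inline vx_size CV_SURF_descriptor_bytes(bool extended)
{
	//128 floats (extended) or 64 floats (basic), 4 bytes each
	return (vx_size)((extended ? 128 : 64) * sizeof(float));
}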
/*!***********************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_orb_detector_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;
	vx_image image_in = (vx_image) parameters[0];
	vx_image mask = (vx_image) parameters[1];
	vx_array array = (vx_array) parameters[2];
	vx_scalar NFEATURES = (vx_scalar) parameters[3];
	vx_scalar SCALEFAC = (vx_scalar) parameters[4];
	vx_scalar NLEVELS = (vx_scalar) parameters[5];
	vx_scalar EDGETHRES = (vx_scalar) parameters[6];
	vx_scalar FIRSTLEVEL = (vx_scalar) parameters[7];
	vx_scalar WTA_k = (vx_scalar) parameters[8];
	vx_scalar SCORETYPE = (vx_scalar) parameters[9];
	vx_scalar PATCHSIZE = (vx_scalar) parameters[10];

	Mat *mat, *mask_mat;
	int nFeatures, nLevels, edgeThreshold, firstLevel, WTA_K, scoreType, patchSize;
	float  ScaleFactor;
	vector<KeyPoint> key_points;
	vx_int32 value = 0;
	vx_float32 value_F = 0;

	//Extracting Values from the Scalar 
	STATUS_ERROR_CHECK(vxReadScalarValue(NFEATURES, &value)); nFeatures = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(SCALEFAC, &value_F)); ScaleFactor = value_F;
	STATUS_ERROR_CHECK(vxReadScalarValue(NLEVELS, &value)); nLevels = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(EDGETHRES, &value)); edgeThreshold = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(FIRSTLEVEL, &value)); firstLevel = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(WTA_k, &value)); WTA_K = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(SCORETYPE, &value)); scoreType = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(PATCHSIZE, &value)); patchSize = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mask_mat, mask));

	//Compute using OpenCV
	Ptr<Feature2D> orb = ORB::create(nFeatures, ScaleFactor, nLevels, edgeThreshold, firstLevel, WTA_K, scoreType, patchSize);
	orb->detect(*mat, key_points, *mask_mat);

	//Equivalent OpenCV 2.4 call:
	//ORB orb(nFeatures, ScaleFactor, nLevels, edgeThreshold, firstLevel, WTA_K, scoreType, patchSize);
	//orb(*mat, *mask_mat, key_points);

	//Converting OpenCV Keypoints to OpenVX Keypoints
	STATUS_ERROR_CHECK(CV_to_VX_keypoints(key_points, array));

	return status;
}
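/*!***********************************************************************************************************
Usage sketch (assumption, not from the original source): the scalar inputs of this kernel mirror the
arguments of cv::ORB::create(); the values below are OpenCV's documented defaults. The function and
variable names are hypothetical and error checking is omitted for brevity.
*************************************************************************************************************/
static void CV_orb_default_scalars_example(vx_context context)
{
	//OpenCV defaults: nfeatures=500, scaleFactor=1.2f, nlevels=8, edgeThreshold=31,
	//firstLevel=0, WTA_K=2, scoreType=ORB::HARRIS_SCORE (0), patchSize=31
	vx_int32 nfeatures = 500;
	vx_float32 scaleFactor = 1.2f;
	vx_scalar s_nfeatures = vxCreateScalar(context, VX_TYPE_INT32, &nfeatures);
	vx_scalar s_scaleFactor = vxCreateScalar(context, VX_TYPE_FLOAT32, &scaleFactor);
	//...the remaining scalars are created the same way and supplied as parameters[3]..[10] of the node
	vxReleaseScalar(&s_nfeatures);
	vxReleaseScalar(&s_scaleFactor);
}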
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_transpose_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image) parameters[0];
	vx_image image_out = (vx_image) parameters[1];

	Mat *mat, bl;

	//Validation
	vx_uint32 width_in, height_in, width_out, height_out;
	width_in = height_in = width_out = height_out = 0;
	STATUS_ERROR_CHECK(vxQueryImage(image_in, VX_IMAGE_ATTRIBUTE_WIDTH, &width_in, sizeof(width_in)));
	STATUS_ERROR_CHECK(vxQueryImage(image_in, VX_IMAGE_ATTRIBUTE_HEIGHT, &height_in, sizeof(height_in)));
	STATUS_ERROR_CHECK(vxQueryImage(image_out, VX_IMAGE_ATTRIBUTE_WIDTH, &width_out, sizeof(width_out)));
	STATUS_ERROR_CHECK(vxQueryImage(image_out, VX_IMAGE_ATTRIBUTE_HEIGHT, &height_out, sizeof(height_out)));
	if (height_in != width_out || width_in != height_out) { status = VX_ERROR_INVALID_DIMENSION; return status; }

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	cv::transpose(*mat, bl);

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_norm_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;
	vx_image image_in = (vx_image) parameters[0];
	vx_scalar scalar = (vx_scalar) parameters[1];
	vx_scalar scalar1 = (vx_scalar) parameters[2];

	Mat *mat;
	int Type;
	vx_int32 value = 0;

	//Extracting Values from the Scalar into Type
	STATUS_ERROR_CHECK(vxReadScalarValue(scalar1, &value));	Type = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	float NORM_Val = (float) norm(*mat, Type);

	//Writing the computed norm value to the output scalar
	STATUS_ERROR_CHECK(vxWriteScalarValue(scalar, &NORM_Val));

	return status;
}
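/*!***********************************************************************************************************
Reference note (standard OpenCV constants, not defined in this file): the Type scalar above is forwarded
to cv::norm() as a cv::NormTypes value, e.g. NORM_INF = 1, NORM_L1 = 2, NORM_L2 = 4, NORM_L2SQR = 5,
NORM_HAMMING = 6, NORM_HAMMING2 = 7.
*************************************************************************************************************/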
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_fastNlMeansDenoisingColored_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;


	vx_image image_in = (vx_image) parameters[0];
	vx_image image_out = (vx_image) parameters[1];
	vx_scalar H = (vx_scalar) parameters[2];
	vx_scalar H_COLOR = (vx_scalar) parameters[3];
	vx_scalar Template_WS = (vx_scalar) parameters[4];
	vx_scalar Search_WS = (vx_scalar) parameters[5];
	Mat *mat, bl;
	int search_ws, template_ws;
	float h, h_color;
	vx_float32 value_f = 0;
	vx_int32 value = 0;

	//Extracting Values from the Scalars into h, h_color, template_ws, and search_ws
	STATUS_ERROR_CHECK(vxReadScalarValue(H, &value_f)); h = value_f;
	STATUS_ERROR_CHECK(vxReadScalarValue(H_COLOR, &value_f)); h_color = value_f;
	STATUS_ERROR_CHECK(vxReadScalarValue(Template_WS, &value)); template_ws = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(Search_WS, &value)); search_ws = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(match_vx_image_parameters(image_in, image_out));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	cv::fastNlMeansDenoisingColored(*mat, bl, h, h_color, template_ws, search_ws);

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
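/*!***********************************************************************************************************
Reference note: OpenCV's documented defaults for cv::fastNlMeansDenoisingColored() are h = 3, hColor = 3,
templateWindowSize = 7 and searchWindowSize = 21; both window sizes should be odd.
*************************************************************************************************************/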
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_flip_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image)parameters[0];
	vx_image image_out = (vx_image)parameters[1];
	vx_scalar scalar = (vx_scalar)parameters[2];
	Mat *mat, bl;
	int FlipCode;

	vx_int32 value = 0;

	//Extracting Values from the Scalar into FlipCode
	STATUS_ERROR_CHECK(vxReadScalarValue(scalar, &value)); FlipCode = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	cv::flip(*mat, bl, FlipCode); //FlipCode: 0 = flip around the x-axis, positive = around the y-axis, negative = around both axes; the output has the same size as the input

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_Boxfilter_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image) parameters[0];
	vx_image image_out = (vx_image) parameters[1];
	vx_scalar scalar = (vx_scalar) parameters[2];
	vx_scalar S_width = (vx_scalar) parameters[3];
	vx_scalar S_height = (vx_scalar) parameters[4];
	vx_scalar A_X = (vx_scalar) parameters[5];
	vx_scalar A_Y = (vx_scalar) parameters[6];
	vx_scalar NORM = (vx_scalar) parameters[7];
	vx_scalar BORDER = (vx_scalar) parameters[8];

	Mat *mat, bl;
	int ddepth, W, H, a_x = -1, a_y = -1, border = 4;

	vx_int32 value = 0;
	vx_bool value_b, norm;

	//Extracting Values from the Scalars into ddepth, kernel size, anchor, normalize flag, and border type
	STATUS_ERROR_CHECK(vxReadScalarValue(scalar, &value)); ddepth = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(S_width, &value)); W = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(S_height, &value)); H = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_X, &value)); a_x = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_Y, &value)); a_y = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(NORM, &value_b)); norm = value_b;
	STATUS_ERROR_CHECK(vxReadScalarValue(BORDER, &value)); border = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(match_vx_image_parameters(image_in, image_out));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	Point point(a_x, a_y);
	bool Normalized = (norm == vx_true_e);
	cv::boxFilter(*mat, bl, ddepth, Size(W, H), point, Normalized, border);

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
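/*!***********************************************************************************************************
Reference note (standard OpenCV constants): the BORDER scalar maps to cv::BorderTypes, e.g.
BORDER_CONSTANT = 0, BORDER_REPLICATE = 1, BORDER_REFLECT = 2, BORDER_WRAP = 3,
BORDER_REFLECT_101 = 4 (BORDER_DEFAULT); an anchor of (-1, -1) selects the kernel center.
*************************************************************************************************************/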
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_sepFilter2D_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image) parameters[0];
	vx_image image_out = (vx_image) parameters[1];
	vx_scalar scalar = (vx_scalar) parameters[2];
	vx_matrix KERNELX = (vx_matrix) parameters[3];
	vx_matrix KERNELY = (vx_matrix) parameters[4];
	vx_scalar A_X = (vx_scalar) parameters[5];
	vx_scalar A_Y = (vx_scalar) parameters[6];
	vx_scalar DELTA = (vx_scalar) parameters[7];
	vx_scalar BORDER = (vx_scalar) parameters[8];

	Mat *mat, bl;
	int ddepth, a_x = -1, a_y = -1, border = 4;
	float delta = 0;
	vx_int32 value = 0;
	vx_float32 value_f = 0;

	//Extracting Values from the Scalars into ddepth, anchor, delta, and border type
	STATUS_ERROR_CHECK(vxReadScalarValue(scalar, &value)); ddepth = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_X, &value)); a_x = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_Y, &value)); a_y = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(DELTA, &value_f)); delta = value_f;
	STATUS_ERROR_CHECK(vxReadScalarValue(BORDER, &value)); border = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(match_vx_image_parameters(image_in, image_out));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	Point point;
	point.x = a_x;
	point.y = a_y;
	Mat *kernelX, *kernelY;
	STATUS_ERROR_CHECK(VX_to_CV_MATRIX(&kernelX, KERNELX));
	STATUS_ERROR_CHECK(VX_to_CV_MATRIX(&kernelY, KERNELY));
	cv::sepFilter2D(*mat, bl, ddepth, *kernelX, *kernelY, point, delta, border);

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
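/*!***********************************************************************************************************
Reference sketch (plain OpenCV, not part of the OpenVX path): KERNELX/KERNELY above are the 1-D row and
column kernels of a separable filter, e.g. a Gaussian built with cv::getGaussianKernel(). The function
name and parameter values here are illustrative only.
*************************************************************************************************************/
static void CV_sepFilter2D_reference_example(const Mat &input, Mat &output)
{
	Mat kx = cv::getGaussianKernel(5, 1.5, CV_32F); //5x1 column vector
	Mat ky = cv::getGaussianKernel(5, 1.5, CV_32F);
	cv::sepFilter2D(input, output, -1, kx, ky, Point(-1, -1), 0, cv::BORDER_DEFAULT);
}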
/************************************************************************************************************
Execution Kernel
*************************************************************************************************************/
static vx_status VX_CALLBACK CV_warpPerspective_Kernel(vx_node node, const vx_reference *parameters, vx_uint32 num)
{
	vx_status status = VX_SUCCESS;

	vx_image image_in = (vx_image) parameters[0];
	vx_image image_out = (vx_image) parameters[1];
	vx_matrix KERNEL = (vx_matrix) parameters[2];
	vx_scalar A_X = (vx_scalar) parameters[3];
	vx_scalar A_Y = (vx_scalar) parameters[4];
	vx_scalar FLAGS = (vx_scalar) parameters[5];
	vx_scalar BORDER = (vx_scalar) parameters[6];

	Mat *mat, bl;
	int flags;
	int a_x = -1, a_y = -1, border = 4;
	vx_int32 value = 0;

	//Extracting Values from the Scalars into flags, output size (a_x, a_y), and border type
	STATUS_ERROR_CHECK(vxReadScalarValue(FLAGS, &value)); flags = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_X, &value)); a_x = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(A_Y, &value)); a_y = value;
	STATUS_ERROR_CHECK(vxReadScalarValue(BORDER, &value)); border = value;

	//Converting VX Image to OpenCV Mat
	STATUS_ERROR_CHECK(match_vx_image_parameters(image_in, image_out));
	STATUS_ERROR_CHECK(VX_to_CV_Image(&mat, image_in));

	//Compute using OpenCV
	Mat *M;
	STATUS_ERROR_CHECK(VX_to_CV_MATRIX(&M, KERNEL));
	cv::warpPerspective(*mat, bl, *M, Size(a_x, a_y), flags, border);

	//Converting OpenCV Mat into VX Image
	STATUS_ERROR_CHECK(CV_to_VX_Image(image_out, &bl));

	return status;
}
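/*!***********************************************************************************************************
Reference sketch (plain OpenCV, not part of the OpenVX path): the KERNEL matrix consumed above is the
usual 3x3 perspective transform, for example one produced by cv::getPerspectiveTransform(). The function
name and corner coordinates here are illustrative only.
*************************************************************************************************************/
static void CV_warpPerspective_reference_example(const Mat &input, Mat &output)
{
	Point2f src[4] = { Point2f(0, 0), Point2f(639, 0), Point2f(639, 479), Point2f(0, 479) };
	Point2f dst[4] = { Point2f(20, 10), Point2f(620, 5), Point2f(630, 470), Point2f(10, 460) };
	Mat M = cv::getPerspectiveTransform(src, dst); //3x3 transform, CV_64F
	cv::warpPerspective(input, output, M, Size(640, 480), cv::INTER_LINEAR, cv::BORDER_CONSTANT);
}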