VX_API_ENTRY vx_parameter VX_API_CALL vxGetKernelParameterByIndex(vx_kernel kernel, vx_uint32 index)
{
    /* Creates a standalone vx_parameter describing parameter <index> of a kernel
     * (no node association). Returns NULL for an invalid kernel reference, or an
     * error object when the index is out of range. On success the new parameter
     * holds an internal reference on the kernel. */
    vx_parameter parameter = NULL;
    if (vxIsValidSpecificReference(&kernel->base, VX_TYPE_KERNEL) == vx_true_e)
    {
        if (index < VX_INT_MAX_PARAMS && index < kernel->signature.num_parameters)
        {
            parameter = (vx_parameter)vxCreateReference(kernel->base.context, VX_TYPE_PARAMETER, VX_EXTERNAL, &kernel->base.context->base);
            if (parameter && parameter->base.type == VX_TYPE_PARAMETER)
            {
                parameter->index = index;
                parameter->node = NULL;     /* kernel parameters are not bound to a node */
                parameter->kernel = kernel;
                /* BUGFIX: was "¶meter" (mojibake for "&parameter") which does not compile. */
                vxIncrementReference(&parameter->kernel->base, VX_INTERNAL);
            }
        }
        else
        {
            vxAddLogEntry(&kernel->base, VX_ERROR_INVALID_PARAMETERS, "Index %u out of range for node %s (numparams = %u)!\n", index, kernel->name, kernel->signature.num_parameters);
            parameter = (vx_parameter_t *)vxGetErrorObject(kernel->base.context, VX_ERROR_INVALID_PARAMETERS);
        }
    }
    return parameter;
}
/* Commits user data back into a distribution. If ptr differs from the internal
 * buffer, the data is copied in under the reference lock; committing also marks
 * the reference as written and drops one external reference (presumably pairing
 * with the increment done by vxAccessDistribution — confirm with the access path). */
VX_API_ENTRY vx_status VX_API_CALL vxCommitDistribution(vx_distribution distribution, const void *ptr) { vx_status status = VX_FAILURE; if ((vxIsValidSpecificReference(&distribution->base, VX_TYPE_DISTRIBUTION) == vx_true_e) && (vxAllocateMemory(distribution->base.context, &distribution->memory) == vx_true_e)) { if (ptr != NULL) { vxSemWait(&distribution->base.lock); { if (ptr != distribution->memory.ptrs[0]) { vx_size size = vxComputeMemorySize(&distribution->memory, 0); memcpy(distribution->memory.ptrs[0], ptr, size); VX_PRINT(VX_ZONE_INFO, "Copied distribution from %p to %p for "VX_FMT_SIZE" bytes\n", ptr, distribution->memory.ptrs[0], size); } } vxSemPost(&distribution->base.lock); vxWroteToReference(&distribution->base); } vxDecrementReference(&distribution->base, VX_EXTERNAL); status = VX_SUCCESS; } else { VX_PRINT(VX_ZONE_ERROR, "Not a valid object!\n"); } return status; }
/* Maps distribution data for the caller. If *ptr is NULL the caller gets a direct
 * pointer to the internal buffer; otherwise the data is copied out to *ptr. Marks
 * the reference as read and takes one external reference (released on commit). */
VX_API_ENTRY vx_status VX_API_CALL vxAccessDistribution(vx_distribution distribution, void **ptr, vx_enum usage) { vx_status status = VX_FAILURE; if ((vxIsValidSpecificReference(&distribution->base, VX_TYPE_DISTRIBUTION) == vx_true_e) && (vxAllocateMemory(distribution->base.context, &distribution->memory) == vx_true_e)) { if (ptr != NULL) { vxSemWait(&distribution->base.lock); { vx_size size = vxComputeMemorySize(&distribution->memory, 0); vxPrintMemory(&distribution->memory); if (*ptr == NULL) { *ptr = distribution->memory.ptrs[0]; } else if (*ptr != NULL) { memcpy(*ptr, distribution->memory.ptrs[0], size); } } vxSemPost(&distribution->base.lock); vxReadFromReference(&distribution->base); } vxIncrementReference(&distribution->base, VX_EXTERNAL); status = VX_SUCCESS; } else { VX_PRINT(VX_ZONE_ERROR, "Not a valid object!\n"); } return status; }
VX_API_ENTRY vx_array VX_API_CALL vxCreateVirtualArray(vx_graph graph, vx_enum item_type, vx_size capacity)
{
    /* Creates a virtual array scoped to the given graph. VX_TYPE_INVALID is an
     * accepted item type for virtual objects (the type may be resolved later). */
    vx_array arr = NULL;
    if (vxIsValidSpecificReference(&graph->base, VX_TYPE_GRAPH) != vx_true_e)
        return arr;
    if ((vxIsValidArrayItemType(graph->base.context, item_type) == vx_true_e) ||
        (item_type == VX_TYPE_INVALID))
    {
        arr = (vx_array)vxCreateArrayInt(graph->base.context, item_type, capacity, vx_true_e, VX_TYPE_ARRAY);
        if (arr && arr->base.type == VX_TYPE_ARRAY)
        {
            /* tie the virtual object's lifetime/visibility to the graph */
            arr->base.scope = (vx_reference_t *)graph;
        }
        else
        {
            arr = (vx_array)vxGetErrorObject(graph->base.context, VX_ERROR_NO_MEMORY);
        }
    }
    else
    {
        arr = (vx_array)vxGetErrorObject(graph->base.context, VX_ERROR_INVALID_PARAMETERS);
    }
    return arr;
}
VX_API_ENTRY vx_status VX_API_CALL vxQueryScalar(vx_scalar scalar, vx_enum attribute, void *ptr, vx_size size)
{
    /* Queries an attribute of a scalar; only the element type is exposed. */
    vx_scalar_t *pscalar = (vx_scalar_t *)scalar;
    vx_status status = VX_SUCCESS;
    if (vxIsValidSpecificReference(&pscalar->base, VX_TYPE_SCALAR) == vx_false_e)
        return VX_ERROR_INVALID_REFERENCE;
    if (attribute == VX_SCALAR_ATTRIBUTE_TYPE)
    {
        if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3))
            *(vx_enum *)ptr = pscalar->data_type;
        else
            status = VX_ERROR_INVALID_PARAMETERS;
    }
    else
    {
        status = VX_ERROR_NOT_SUPPORTED;
    }
    return status;
}
vx_status vxAddParameterToKernel(vx_kernel kernel, vx_uint32 index, vx_enum dir, vx_enum type, vx_enum state)
{
    /* Records the direction/type/state of parameter <index> in the kernel's
     * signature. Tiling kernels are restricted to image and scalar parameters;
     * all other kernels accept any valid type. */
    vx_status status = VX_ERROR_INVALID_PARAMETERS;
    vx_kernel_t *kern = (vx_kernel_t *)kernel;
    VX_PRINT(VX_ZONE_KERNEL,"INFO: Adding index %u, type 0x%x, dir:%d state:%d\n", index, type, dir, state);
    if (vxIsValidSpecificReference(&kern->base, VX_TYPE_KERNEL) == vx_false_e)
    {
        VX_PRINT(VX_ZONE_ERROR, "Not a valid reference!\n");
        return VX_ERROR_INVALID_REFERENCE;
    }
    if (index < kern->signature.numParams)
    {
        /* tiling kernels only accept images and scalars; others use the generic check */
        vx_bool type_ok;
        if (kern->tiling_function)
            type_ok = ((type == VX_TYPE_IMAGE) || (type == VX_TYPE_SCALAR)) ? vx_true_e : vx_false_e;
        else
            type_ok = vxIsValidType(type);
        if ((type_ok == vx_false_e) ||
            (vxIsValidDirection(dir) == vx_false_e) ||
            (vxIsValidState(state) == vx_false_e))
        {
            status = VX_ERROR_INVALID_PARAMETERS;
        }
        else
        {
            kern->signature.directions[index] = dir;
            kern->signature.types[index] = type;
            kern->signature.states[index] = state;
            status = VX_SUCCESS;
        }
    }
    else
    {
        status = VX_ERROR_INVALID_PARAMETERS;
    }
    return status;
}
/* Drops one (internal or external) count on a reference of the expected type.
 * When the total count reaches zero the optional destructor runs, the reference
 * is removed from its context, its magic is cleared (invalidating stale copies),
 * and the storage is freed. */
void vxReleaseReference(vx_reference_t *ref, vx_enum type, vx_bool internal, vx_destructor_f destructor) { if (vxIsValidSpecificReference(ref, type) == vx_true_e) { vx_bool result = vx_false_e; if (internal == vx_true_e) result = vxDecrementIntReference(ref); else result = vxDecrementReference(ref); if ((result == vx_true_e) && (vxTotalReferenceCount(ref) == 0)) { /* if the caller supplied a destructor, call it. */ if (destructor) { destructor(ref); } vxRemoveReference(ref->context, ref); ref->magic = 0; /* make sure no existing copies of refs can use ref again */ free(ref); } } }
void vxDestructParameter(vx_reference ref)
{
    /* Destructor for vx_parameter: releases the internal references taken on the
     * associated node and kernel when the parameter object was created. */
    vx_parameter param = (vx_parameter)ref;
    if (param->node)
    {
        /* BUGFIX: was "¶m" (mojibake for "&param") which does not compile. */
        if (vxIsValidSpecificReference(&param->node->base, VX_TYPE_NODE) == vx_true_e)
        {
            vx_node node = (vx_node)param->node;
            vxReleaseReferenceInt((vx_reference *)&node, VX_TYPE_NODE, VX_INTERNAL, NULL);
        }
    }
    if (param->kernel)
    {
        if (vxIsValidSpecificReference(&param->kernel->base, VX_TYPE_KERNEL) == vx_true_e)
        {
            vx_kernel kernel = (vx_kernel)param->kernel;
            vxReleaseReferenceInt((vx_reference *)&kernel, VX_TYPE_KERNEL, VX_INTERNAL, NULL);
        }
    }
}
vx_status vxQueryConvolution(vx_convolution conv, vx_enum attribute, void *ptr, vx_size size)
{
    /* Queries convolution attributes: dimensions, scale, and the byte size of
     * the coefficient array (rows * columns * sizeof(vx_int16)). */
    vx_convolution_t *convolution = (vx_convolution_t *)conv;
    vx_status status = VX_SUCCESS;
    if (vxIsValidSpecificReference(&convolution->base.base, VX_TYPE_CONVOLUTION) == vx_false_e)
        return VX_ERROR_INVALID_REFERENCE;
    switch (attribute)
    {
        case VX_CONVOLUTION_ATTRIBUTE_ROWS:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = convolution->base.rows;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_CONVOLUTION_ATTRIBUTE_COLUMNS:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = convolution->base.columns;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_CONVOLUTION_ATTRIBUTE_SCALE:
            if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3))
                *(vx_uint32 *)ptr = convolution->scale;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_CONVOLUTION_ATTRIBUTE_SIZE:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = convolution->base.columns * convolution->base.rows * sizeof(vx_int16);
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        default:
            status = VX_ERROR_NOT_SUPPORTED;
            break;
    }
    return status;
}
static vx_bool vxIsValidArray(vx_array arr)
{
    /* An array is valid when it is non-NULL, its reference checks out as an
     * array, and its item type is one the context recognizes. */
    vx_bool valid = vx_false_e;
    if (arr != NULL)
    {
        valid = vxIsValidSpecificReference(&arr->base, VX_TYPE_ARRAY);
        if (valid == vx_true_e)
            valid = vxIsValidArrayItemType(arr->base.context, arr->item_type);
    }
    return valid;
}
VX_API_ENTRY vx_status VX_API_CALL vxSetParameterByReference(vx_parameter parameter, vx_reference value)
{
    /* Convenience wrapper: resolves the parameter's node/index pair and
     * forwards to vxSetParameterByIndex. Fails for kernel-only parameters
     * (no associated node). */
    vx_status status = VX_ERROR_INVALID_PARAMETERS;
    if (vxIsValidSpecificReference((vx_reference_t *)parameter, VX_TYPE_PARAMETER) == vx_true_e &&
        parameter->node != NULL)
    {
        status = vxSetParameterByIndex(parameter->node, parameter->index, value);
    }
    return status;
}
vx_status vxRemoveKernel(vx_kernel kernel)
{
    /* Deactivates a kernel: drops one reference, disables it, invalidates its
     * enum, and decrements the context's kernel count. */
    vx_kernel_t *kern = (vx_kernel_t *)kernel;
    if (vxIsValidSpecificReference(&kern->base, VX_TYPE_KERNEL) != vx_true_e)
        return VX_ERROR_INVALID_PARAMETERS;
    vxDecrementReference(&kern->base);
    kern->enumeration = VX_KERNEL_INVALID;
    kern->enabled = vx_false_e;
    kern->base.context->numKernels--;
    return VX_SUCCESS;
}
VX_API_ENTRY vx_image VX_API_CALL vxGetPyramidLevel(vx_pyramid pyramid, vx_uint32 index)
{
    /* Returns the image at the requested pyramid level, taking an external
     * reference on behalf of the caller. Returns 0 on an invalid pyramid or
     * out-of-range index. */
    vx_image image = 0;
    if (vxIsValidSpecificReference(&pyramid->base, VX_TYPE_PYRAMID) != vx_true_e)
        return image;
    if (index < pyramid->numLevels)
    {
        image = pyramid->levels[index];
        vxIncrementReference(&image->base, VX_EXTERNAL);
    }
    return image;
}
void vxReleaseKernel(vx_kernel *kernel)
{
    /* Releases one external reference on a kernel and clears the caller's handle. */
    vx_kernel_t *kern = (vx_kernel_t *)(kernel ? *kernel : 0);
    /* BUGFIX: kern can legitimately be NULL here (kernel == NULL or *kernel == NULL);
     * the original computed &kern->base on a null pointer, which is undefined
     * behavior. Guard before dereferencing. */
    if (kern && vxIsValidSpecificReference(&kern->base, VX_TYPE_KERNEL) == vx_true_e)
    {
        VX_PRINT(VX_ZONE_KERNEL, "Releasing kernel "VX_FMT_REF"\n", (void *)kern);
        vxDecrementReference(&kern->base);
    }
    else
    {
        VX_PRINT(VX_ZONE_ERROR, "Invalid Reference!\n");
    }
    if (kernel)
        *kernel = 0;
}
VX_API_ENTRY vx_status VX_API_CALL vxCommitLUT(vx_lut l, const void *ptr)
{
    /* Commits LUT data by delegating to the array commit helper over the
     * LUT's full item range. */
    vx_lut_t *lut = (vx_lut_t *)l;
    if (vxIsValidSpecificReference(&lut->base, VX_TYPE_LUT) != vx_true_e)
    {
        VX_PRINT(VX_ZONE_ERROR, "Not a valid object!\n");
        return VX_FAILURE;
    }
    return vxCommitArrayRangeInt((vx_array_t *)l, 0, lut->num_items, ptr);
}
VX_API_ENTRY vx_status VX_API_CALL vxQueryLUT(vx_lut l, vx_enum attribute, void *ptr, vx_size size)
{
    /* Queries LUT attributes: element type, item count, and total byte size. */
    vx_lut_t *lut = (vx_lut_t *)l;
    vx_status status = VX_SUCCESS;
    if (vxIsValidSpecificReference(&lut->base, VX_TYPE_LUT) == vx_false_e)
        return VX_ERROR_INVALID_REFERENCE;
    switch (attribute)
    {
        case VX_LUT_ATTRIBUTE_TYPE:
            if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3))
                *(vx_enum *)ptr = lut->item_type;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_LUT_ATTRIBUTE_COUNT:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = lut->num_items;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_LUT_ATTRIBUTE_SIZE:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = lut->num_items * lut->item_size;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        default:
            status = VX_ERROR_NOT_SUPPORTED;
            break;
    }
    return status;
}
VX_API_ENTRY vx_pyramid VX_API_CALL vxCreateVirtualPyramid(vx_graph graph, vx_size levels, vx_float32 scale, vx_uint32 width, vx_uint32 height, vx_df_image format)
{
    /* Creates a virtual pyramid whose lifetime is scoped to the given graph. */
    vx_pyramid pyramid = NULL;
    if (vxIsValidSpecificReference(&graph->base, VX_TYPE_GRAPH) != vx_true_e)
        return pyramid;
    pyramid = vxCreatePyramidInt(graph->base.context, levels, scale, width, height, format, vx_true_e);
    if (pyramid && pyramid->base.type == VX_TYPE_PYRAMID)
    {
        /* tie the virtual object's visibility to the graph */
        pyramid->base.scope = (vx_reference_t *)graph;
    }
    return pyramid;
}
/* Copies the convolution coefficients out to the caller's array (if non-NULL)
 * under the reference lock, marks the reference as read, and takes a reference
 * on the convolution (presumably released by the matching commit call — confirm
 * against vxCommitConvolutionCoefficients). */
vx_status vxAccessConvolutionCoefficients(vx_convolution conv, vx_int16 *array) { vx_convolution_t *convolution = (vx_convolution_t *)conv; vx_status status = VX_ERROR_INVALID_REFERENCE; if ((vxIsValidSpecificReference(&convolution->base.base, VX_TYPE_CONVOLUTION) == vx_true_e) && (vxAllocateMemory(convolution->base.base.context, &convolution->base.memory) == vx_true_e)) { vxSemWait(&convolution->base.base.lock); if (array) { vx_size size = convolution->base.memory.strides[0][1] * convolution->base.memory.dims[0][1]; memcpy(array, convolution->base.memory.ptrs[0], size); } vxSemPost(&convolution->base.base.lock); vxReadFromReference(&convolution->base.base); vxIncrementReference(&convolution->base.base); status = VX_SUCCESS; } return status; }
vx_status vxSetConvolutionAttribute(vx_convolution conv, vx_enum attr, void *ptr, vx_size size)
{
    /* Sets a convolution attribute; only SCALE is writable and it must be a
     * power of two. */
    vx_convolution_t *convolution = (vx_convolution_t *)conv;
    vx_status status = VX_SUCCESS;
    if (vxIsValidSpecificReference(&convolution->base.base, VX_TYPE_CONVOLUTION) == vx_false_e)
        return VX_ERROR_INVALID_REFERENCE;
    if (attr == VX_CONVOLUTION_ATTRIBUTE_SCALE)
    {
        if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3))
        {
            vx_uint32 scale = *(vx_uint32 *)ptr;
            if (vxIsPowerOfTwo(scale) == vx_true_e)
            {
                VX_PRINT(VX_ZONE_INFO, "Convolution Scale assigned to %u\n", scale);
                convolution->scale = scale;
            }
            else
            {
                status = VX_ERROR_INVALID_VALUE;
            }
        }
        else
        {
            status = VX_ERROR_INVALID_PARAMETERS;
        }
    }
    else
    {
        status = VX_ERROR_INVALID_PARAMETERS;
    }
    if (status != VX_SUCCESS)
    {
        VX_PRINT(VX_ZONE_ERROR, "Failed to set attribute on convolution! (%d)\n", status);
    }
    return status;
}
VX_API_ENTRY vx_parameter VX_API_CALL vxGetParameterByIndex(vx_node node, vx_uint32 index)
{
    /* Creates a vx_parameter describing parameter <index> of a node. Returns
     * NULL for an invalid node reference, or an error object when the node has
     * no kernel or the index is out of range. The parameter takes internal
     * references on both the node and its kernel. */
    vx_parameter param = NULL;
    if (vxIsValidSpecificReference(&node->base, VX_TYPE_NODE) == vx_false_e)
    {
        return param;
    }
    if (node->kernel == NULL)
    {
        /* this can probably never happen */
        vxAddLogEntry(&node->base, VX_ERROR_INVALID_NODE, "Node was created without a kernel! Fatal Error!\n");
        param = (vx_parameter_t *)vxGetErrorObject(node->base.context, VX_ERROR_INVALID_NODE);
    }
    else if (index < VX_INT_MAX_PARAMS && index < node->kernel->signature.num_parameters)
    {
        param = (vx_parameter)vxCreateReference(node->base.context, VX_TYPE_PARAMETER, VX_EXTERNAL, &node->base);
        if (param && param->base.type == VX_TYPE_PARAMETER)
        {
            param->index = index;
            param->node = node;
            /* BUGFIX: both increments below used "¶m" (mojibake for "&param"),
             * which does not compile. */
            vxIncrementReference(&param->node->base, VX_INTERNAL);
            param->kernel = node->kernel;
            vxIncrementReference(&param->kernel->base, VX_INTERNAL);
        }
    }
    else
    {
        vxAddLogEntry(&node->base, VX_ERROR_INVALID_PARAMETERS, "Index %u out of range for node %s (numparams = %u)!\n", index, node->kernel->name, node->kernel->signature.num_parameters);
        param = (vx_parameter_t *)vxGetErrorObject(node->base.context, VX_ERROR_INVALID_PARAMETERS);
    }
    VX_PRINT(VX_ZONE_API, "%s: returning %p\n", __FUNCTION__, param);
    return param;
}
/* Queries attributes of an import object: reference count and import type.
 * NOTE(review): the TYPE case checks `size <= VX_MAX_TARGET_NAME` but writes a
 * single vx_uint32 — this looks inconsistent with the COUNT case's
 * VX_CHECK_PARAM; confirm the intended contract before tightening it. */
VX_API_ENTRY vx_status VX_API_CALL vxQueryImport(vx_import import, vx_enum attribute, void *ptr, vx_size size) { vx_status status = VX_SUCCESS; if (vxIsValidSpecificReference((vx_reference_t *)import, VX_TYPE_IMPORT) == vx_true_e) { switch (attribute) { case VX_IMPORT_ATTRIBUTE_COUNT: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { *(vx_uint32 *)ptr = import->count; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_IMPORT_ATTRIBUTE_TYPE: if ((size <= VX_MAX_TARGET_NAME) && (ptr != NULL)) { *(vx_uint32 *)ptr = import->type; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; default: status = VX_ERROR_NOT_SUPPORTED; break; } } else status = VX_ERROR_INVALID_REFERENCE; return status; }
/* Validates a kernel's declared signature (every parameter has a legal
 * direction and type) and, if all entries pass, enables the kernel and bumps
 * the context's kernel count.
 * NOTE(review): if numParams exceeded VX_INT_MAX_PARAMS, the loop would end
 * with p != numParams — status stays VX_SUCCESS but the kernel is never
 * enabled; confirm whether that combination is reachable. */
vx_status vxFinalizeKernel(vx_kernel kernel) { vx_status status = VX_SUCCESS; vx_kernel_t *kern = (vx_kernel_t *)kernel; if (vxIsValidSpecificReference(&kern->base, VX_TYPE_KERNEL) == vx_true_e) { vx_uint32 p = 0; for (p = 0; p < VX_INT_MAX_PARAMS; p++) { if (p >= kern->signature.numParams) { break; } if ((kern->signature.directions[p] < VX_INPUT) || (kern->signature.directions[p] > VX_BIDIRECTIONAL)) { status = VX_ERROR_INVALID_PARAMETERS; break; } if (vxIsValidType(kern->signature.types[p]) == vx_false_e) { status = VX_ERROR_INVALID_PARAMETERS; break; } } if (p == kern->signature.numParams) { kern->enabled = vx_true_e; kern->base.context->numKernels++; } } else { status = VX_ERROR_INVALID_REFERENCE; } return status; }
/* Writes a new value into a scalar from caller-supplied storage, dispatching on
 * the scalar's declared data type under the reference lock, then marks the
 * reference as written. Returns VX_ERROR_NOT_SUPPORTED for unhandled types. */
VX_API_ENTRY vx_status VX_API_CALL vxCommitScalarValue(vx_scalar scalar, void *ptr) { vx_status status = VX_SUCCESS; if (vxIsValidSpecificReference(&scalar->base,VX_TYPE_SCALAR) == vx_false_e) return VX_ERROR_INVALID_REFERENCE; if (ptr == NULL) return VX_ERROR_INVALID_PARAMETERS; vxSemWait(&scalar->base.lock); switch (scalar->data_type) { case VX_TYPE_CHAR: scalar->data.chr = *(vx_char *)ptr; break; case VX_TYPE_INT8: scalar->data.s08 = *(vx_int8 *)ptr; break; case VX_TYPE_UINT8: scalar->data.u08 = *(vx_uint8 *)ptr; break; case VX_TYPE_INT16: scalar->data.s16 = *(vx_int16 *)ptr; break; case VX_TYPE_UINT16: scalar->data.u16 = *(vx_uint16 *)ptr; break; case VX_TYPE_INT32: scalar->data.s32 = *(vx_int32 *)ptr; break; case VX_TYPE_UINT32: scalar->data.u32 = *(vx_uint32 *)ptr; break; case VX_TYPE_INT64: scalar->data.s64 = *(vx_int64 *)ptr; break; case VX_TYPE_UINT64: scalar->data.u64 = *(vx_uint64 *)ptr; break; #if OVX_SUPPORT_HALF_FLOAT case VX_TYPE_FLOAT16: scalar->data.f16 = *(vx_float16 *)ptr; break; #endif case VX_TYPE_FLOAT32: scalar->data.f32 = *(vx_float32 *)ptr; break; case VX_TYPE_FLOAT64: scalar->data.f64 = *(vx_float64 *)ptr; break; case VX_TYPE_DF_IMAGE: scalar->data.fcc = *(vx_df_image *)ptr; break; case VX_TYPE_ENUM: scalar->data.enm = *(vx_enum *)ptr; break; case VX_TYPE_SIZE: scalar->data.size = *(vx_size *)ptr; break; case VX_TYPE_BOOL: scalar->data.boolean = *(vx_bool *)ptr; break; default: VX_PRINT(VX_ZONE_ERROR, "some case is not covered in %s\n", __FUNCTION__); status = VX_ERROR_NOT_SUPPORTED; break; } vxPrintScalarValue(scalar); vxSemPost(&scalar->base.lock); vxWroteToReference(&scalar->base); return status; }
/* Populates a meta-format object from an exemplar reference, copying the
 * type-specific dimension fields (image size/format, array item type/capacity,
 * pyramid geometry, scalar/matrix/distribution/remap/LUT/threshold metadata).
 * Unsupported exemplar types yield VX_ERROR_INVALID_REFERENCE. */
VX_API_ENTRY vx_status VX_API_CALL vxSetMetaFormatFromReference(vx_meta_format meta, vx_reference examplar) { vx_status status = VX_SUCCESS; if (vxIsValidSpecificReference(&meta->base, VX_TYPE_META_FORMAT) == vx_false_e) return VX_ERROR_INVALID_REFERENCE; if (vxIsValidReference(examplar) == vx_false_e) return VX_ERROR_INVALID_REFERENCE; switch (examplar->type) { case VX_TYPE_IMAGE: { vx_image image = (vx_image)examplar; meta->type = VX_TYPE_IMAGE; meta->dim.image.width = image->width; meta->dim.image.height = image->height; meta->dim.image.format = image->format; break; } case VX_TYPE_ARRAY: { vx_array array = (vx_array)examplar; meta->type = VX_TYPE_ARRAY; meta->dim.array.item_type = array->item_type; meta->dim.array.capacity = array->capacity; break; } case VX_TYPE_PYRAMID: { vx_pyramid pyramid = (vx_pyramid)examplar; meta->type = VX_TYPE_PYRAMID; meta->dim.pyramid.width = pyramid->width; meta->dim.pyramid.height = pyramid->height; meta->dim.pyramid.format = pyramid->format; meta->dim.pyramid.levels = pyramid->numLevels; meta->dim.pyramid.scale = pyramid->scale; break; } case VX_TYPE_SCALAR: { vx_scalar scalar = (vx_scalar)examplar; meta->type = VX_TYPE_SCALAR; meta->dim.scalar.type = scalar->data_type; break; } case VX_TYPE_MATRIX: { vx_matrix matrix = (vx_matrix)examplar; meta->type = VX_TYPE_MATRIX; meta->dim.matrix.type = matrix->data_type; meta->dim.matrix.cols = matrix->columns; meta->dim.matrix.rows = matrix->rows; break; } case VX_TYPE_DISTRIBUTION: { vx_distribution distribution = (vx_distribution)examplar; meta->type = VX_TYPE_DISTRIBUTION; meta->dim.distribution.bins = distribution->memory.dims[0][VX_DIM_X]; meta->dim.distribution.offset = distribution->offset_x; meta->dim.distribution.range = distribution->range_x; break; } case VX_TYPE_REMAP: { vx_remap remap = (vx_remap)examplar; meta->type = VX_TYPE_REMAP; meta->dim.remap.src_width = remap->src_width; meta->dim.remap.src_height = remap->src_height; meta->dim.remap.dst_width = remap->dst_width; 
meta->dim.remap.dst_height = remap->dst_height; break; } case VX_TYPE_LUT: { vx_lut_t *lut = (vx_lut_t *)examplar; meta->type = VX_TYPE_LUT; meta->dim.lut.type = lut->item_type; meta->dim.lut.count = lut->num_items; break; } case VX_TYPE_THRESHOLD: { vx_threshold threshold = (vx_threshold)examplar; meta->type = VX_TYPE_THRESHOLD; meta->dim.threshold.type = threshold->thresh_type; break; } default: status = VX_ERROR_INVALID_REFERENCE; break; } return status; }
/* Queries distribution attributes. DIMENSIONS/BINS/SIZE are derived from the
 * backing memory descriptor (ndims includes an extra leading dimension, hence
 * the -1); RANGE is bins * window — presumably equivalent to the range used at
 * creation, confirm against vxCreateDistribution. */
VX_API_ENTRY vx_status VX_API_CALL vxQueryDistribution(vx_distribution distribution, vx_enum attribute, void *ptr, vx_size size) { vx_status status = VX_SUCCESS; if (vxIsValidSpecificReference(&distribution->base, VX_TYPE_DISTRIBUTION) == vx_false_e) return VX_ERROR_INVALID_REFERENCE; switch (attribute) { case VX_DISTRIBUTION_ATTRIBUTE_DIMENSIONS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { *(vx_size*)ptr = (vx_size)(distribution->memory.ndims - 1); } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_ATTRIBUTE_RANGE: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { *(vx_uint32*)ptr = (vx_uint32)(distribution->memory.dims[0][VX_DIM_X] * distribution->window_x); } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_ATTRIBUTE_BINS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { *(vx_size*)ptr = (vx_size)distribution->memory.dims[0][VX_DIM_X]; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_ATTRIBUTE_WINDOW: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { *(vx_uint32*)ptr = distribution->window_x; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_ATTRIBUTE_OFFSET: if (VX_CHECK_PARAM(ptr, size, vx_int32, 0x3)) { *(vx_int32*)ptr = distribution->offset_x; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_ATTRIBUTE_SIZE: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { vx_int32 d = distribution->memory.ndims - 1; *(vx_size*)ptr = distribution->memory.strides[0][d] * distribution->memory.dims[0][d]; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; default: status = VX_ERROR_NOT_SUPPORTED; break; } return status; }
/* Sets one field of a meta-format object. The attribute's object class (via
 * VX_TYPE(attribute)) must match the meta's current type; each case validates
 * the caller's pointer/size with VX_CHECK_PARAM before copying the value into
 * the matching dim.* field. */
VX_API_ENTRY vx_status VX_API_CALL vxSetMetaFormatAttribute(vx_meta_format meta, vx_enum attribute, const void *ptr, vx_size size) { vx_status status = VX_SUCCESS; if (vxIsValidSpecificReference(&meta->base, VX_TYPE_META_FORMAT) == vx_false_e) return VX_ERROR_INVALID_REFERENCE; if (VX_TYPE(attribute) != meta->type) { return VX_ERROR_INVALID_TYPE; } switch(attribute) { case VX_IMAGE_FORMAT: if (VX_CHECK_PARAM(ptr, size, vx_df_image, 0x3)) { meta->dim.image.format = *(vx_df_image *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_IMAGE_HEIGHT: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.image.height = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_IMAGE_WIDTH: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.image.width = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_ARRAY_CAPACITY: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.array.capacity = *(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_ARRAY_ITEMTYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { meta->dim.array.item_type = *(vx_enum *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_PYRAMID_FORMAT: if (VX_CHECK_PARAM(ptr, size, vx_df_image, 0x3)) { meta->dim.pyramid.format = *(vx_df_image *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_PYRAMID_HEIGHT: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.pyramid.height = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_PYRAMID_WIDTH: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.pyramid.width = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_PYRAMID_LEVELS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.pyramid.levels = 
*(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_PYRAMID_SCALE: if (VX_CHECK_PARAM(ptr, size, vx_float32, 0x3)) { meta->dim.pyramid.scale = *(vx_float32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_SCALAR_TYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { meta->dim.scalar.type = *(vx_enum *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_MATRIX_TYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { meta->dim.matrix.type = *(vx_enum *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_MATRIX_ROWS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.matrix.rows = *(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_MATRIX_COLUMNS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.matrix.cols = *(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_DISTRIBUTION_BINS: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.distribution.bins = *(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_RANGE: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.distribution.range = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_DISTRIBUTION_OFFSET: if (VX_CHECK_PARAM(ptr, size, vx_int32, 0x3)) { meta->dim.distribution.offset = *(vx_int32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_REMAP_SOURCE_WIDTH: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.remap.src_width = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_REMAP_SOURCE_HEIGHT: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 
0x3)) { meta->dim.remap.src_height = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_REMAP_DESTINATION_WIDTH: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.remap.dst_width = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_REMAP_DESTINATION_HEIGHT: if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3)) { meta->dim.remap.dst_height = *(vx_uint32 *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_LUT_TYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { meta->dim.lut.type = *(vx_enum *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_LUT_COUNT: if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3)) { meta->dim.lut.count = *(vx_size *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; /**********************************************************************/ case VX_THRESHOLD_TYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { meta->dim.threshold.type = *(vx_enum *)ptr; } else { status = VX_ERROR_INVALID_PARAMETERS; } break; default: status = VX_ERROR_NOT_SUPPORTED; break; } return status; }
/* Sets threshold attributes. VALUE is only writable on BINARY thresholds,
 * LOWER/UPPER only on RANGE thresholds; each write marks the reference as
 * modified. TYPE may switch the threshold kind after validation. */
vx_status vxSetThresholdAttribute(vx_threshold t, vx_enum attribute, void *ptr, vx_size size) { vx_status status = VX_SUCCESS; vx_threshold_t *thresh = (vx_threshold_t *)t; if (vxIsValidSpecificReference(&thresh->base, VX_TYPE_THRESHOLD) == vx_true_e) { switch (attribute) { case VX_THRESHOLD_ATTRIBUTE_VALUE: if (VX_CHECK_PARAM(ptr, size, vx_uint8, 0x0) && (thresh->type == VX_THRESHOLD_TYPE_BINARY)) { thresh->value = *(vx_uint8 *)ptr; vxWroteToReference(&thresh->base); } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_THRESHOLD_ATTRIBUTE_LOWER: if (VX_CHECK_PARAM(ptr, size, vx_uint8, 0x0) && (thresh->type == VX_THRESHOLD_TYPE_RANGE)) { thresh->lower = *(vx_uint8 *)ptr; vxWroteToReference(&thresh->base); } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_THRESHOLD_ATTRIBUTE_UPPER: if (VX_CHECK_PARAM(ptr, size, vx_uint8, 0x0) && (thresh->type == VX_THRESHOLD_TYPE_RANGE)) { thresh->upper = *(vx_uint8 *)ptr; vxWroteToReference(&thresh->base); } else { status = VX_ERROR_INVALID_PARAMETERS; } break; case VX_THRESHOLD_ATTRIBUTE_TYPE: if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3)) { vx_enum type = *(vx_enum *)ptr; if (vxIsValidThresholdType(type) == vx_true_e) { thresh->type = type; } else { status = VX_ERROR_INVALID_PARAMETERS; } } else { status = VX_ERROR_INVALID_PARAMETERS; } break; default: status = VX_ERROR_NOT_SUPPORTED; break; } } else { status = VX_ERROR_INVALID_REFERENCE; } VX_PRINT(VX_ZONE_API, "return %d\n", status); return status; }
VX_API_ENTRY vx_status VX_API_CALL vxQueryPyramid(vx_pyramid pyramid, vx_enum attribute, void *ptr, vx_size size)
{
    /* Queries pyramid attributes: level count, scale factor, base geometry,
     * and image format. Note: an invalid reference returns VX_SUCCESS here,
     * matching the original behavior. */
    vx_status status = VX_SUCCESS;
    if (vxIsValidSpecificReference(&pyramid->base, VX_TYPE_PYRAMID) != vx_true_e)
        return status;
    switch (attribute)
    {
        case VX_PYRAMID_ATTRIBUTE_LEVELS:
            if (VX_CHECK_PARAM(ptr, size, vx_size, 0x3))
                *(vx_size *)ptr = pyramid->numLevels;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_PYRAMID_ATTRIBUTE_SCALE:
            if (VX_CHECK_PARAM(ptr, size, vx_float32, 0x3))
                *(vx_float32 *)ptr = pyramid->scale;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_PYRAMID_ATTRIBUTE_WIDTH:
            if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3))
                *(vx_uint32 *)ptr = pyramid->width;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_PYRAMID_ATTRIBUTE_HEIGHT:
            if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3))
                *(vx_uint32 *)ptr = pyramid->height;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        case VX_PYRAMID_ATTRIBUTE_FORMAT:
            if (VX_CHECK_PARAM(ptr, size, vx_df_image, 0x3))
                *(vx_df_image *)ptr = pyramid->format;
            else
                status = VX_ERROR_INVALID_PARAMETERS;
            break;
        default:
            status = VX_ERROR_NOT_SUPPORTED;
            break;
    }
    return status;
}
VX_API_ENTRY vx_status VX_API_CALL vxQueryParameter(vx_parameter parameter, vx_enum attribute, void *ptr, vx_size size)
{
    /* Queries parameter attributes (direction, index, type, state) from the
     * kernel signature, or the bound reference itself (REF) from the node. */
    vx_status status = VX_SUCCESS;
    /* BUGFIX: was "¶meter->base" (mojibake for "&parameter->base") which does not compile. */
    if (vxIsValidSpecificReference(&parameter->base, VX_TYPE_PARAMETER) == vx_true_e)
    {
        switch (attribute)
        {
            case VX_PARAMETER_ATTRIBUTE_DIRECTION:
                if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3))
                    *(vx_enum *)ptr = parameter->kernel->signature.directions[parameter->index];
                else
                    status = VX_ERROR_INVALID_PARAMETERS;
                break;
            case VX_PARAMETER_ATTRIBUTE_INDEX:
                if (VX_CHECK_PARAM(ptr, size, vx_uint32, 0x3))
                    *(vx_uint32 *)ptr = parameter->index;
                else
                    status = VX_ERROR_INVALID_PARAMETERS;
                break;
            case VX_PARAMETER_ATTRIBUTE_TYPE:
                if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3))
                    *(vx_enum *)ptr = parameter->kernel->signature.types[parameter->index];
                else
                    status = VX_ERROR_INVALID_PARAMETERS;
                break;
            case VX_PARAMETER_ATTRIBUTE_STATE:
                if (VX_CHECK_PARAM(ptr, size, vx_enum, 0x3))
                    *(vx_enum *)ptr = (vx_enum)parameter->kernel->signature.states[parameter->index];
                else
                    status = VX_ERROR_INVALID_PARAMETERS;
                break;
            case VX_PARAMETER_ATTRIBUTE_REF:
                if (VX_CHECK_PARAM(ptr, size, vx_reference, 0x3))
                {
                    if (parameter->node)
                    {
                        vx_reference_t *ref = parameter->node->parameters[parameter->index];
                        /* does this object have USER access? */
                        if (ref)
                        {
                            /*! \internal this could potentially allow the user to break
                             * a currently chosen optimization! We need to alert the
                             * system that if a write occurs to this data, put the graph
                             * into an unverified state.
                             */
                            if (ref->external_count == 0)
                                ref->extracted = vx_true_e;
                            vxIncrementReference(ref, VX_EXTERNAL);
                        }
                        *(vx_reference *)ptr = (vx_reference)ref;
                    }
                    else
                        status = VX_ERROR_NOT_SUPPORTED;
                }
                else
                    status = VX_ERROR_INVALID_PARAMETERS;
                break;
            default:
                status = VX_ERROR_NOT_SUPPORTED;
                break;
        }
    }
    else
    {
        status = VX_ERROR_INVALID_REFERENCE;
    }
    return status;
}
/* Assigns a reference as parameter <index> of a node. Validates the node, the
 * index, NULL-for-optional, the reference itself, and type compatibility
 * (including the specific-scalar-type special case), maintains delay
 * associations when the old/new value is a delay element, performs the actual
 * assignment via vxNodeSetParameter, and propagates the change into a child
 * graph's matching graph parameter when one exists. */
VX_API_ENTRY vx_status VX_API_CALL vxSetParameterByIndex(vx_node node, vx_uint32 index, vx_reference value) { vx_status status = VX_SUCCESS; vx_enum type = 0; vx_enum data_type = 0; if (vxIsValidSpecificReference(&node->base, VX_TYPE_NODE) == vx_false_e) { VX_PRINT(VX_ZONE_ERROR, "Supplied node was not actually a node\n"); status = VX_ERROR_INVALID_REFERENCE; goto exit; } VX_PRINT(VX_ZONE_PARAMETER, "Attempting to set parameter[%u] on %s (enum:%d) to "VX_FMT_REF"\n", index, node->kernel->name, node->kernel->enumeration, value); /* is the index out of bounds? */ if ((index >= node->kernel->signature.num_parameters) || (index >= VX_INT_MAX_PARAMS)) { VX_PRINT(VX_ZONE_ERROR, "Invalid index %u\n", index); status = VX_ERROR_INVALID_VALUE; goto exit; } /* if it's an optional parameter, it's ok to be NULL */ if ((value == 0) && (node->kernel->signature.states[index] == VX_PARAMETER_STATE_OPTIONAL)) { status = VX_SUCCESS; goto exit; } /* if it's required, it's got to exist */ if (vxIsValidReference((vx_reference_t *)value) == vx_false_e) { VX_PRINT(VX_ZONE_ERROR, "Supplied value was not actually a reference\n"); status = VX_ERROR_INVALID_REFERENCE; goto exit; } /* if it was a valid reference then get the type from it */ vxQueryReference(value, VX_REF_ATTRIBUTE_TYPE, &type, sizeof(type)); VX_PRINT(VX_ZONE_PARAMETER, "Query returned type %08x for ref "VX_FMT_REF"\n", type, value); /* Check that signature type matches reference type*/ if (node->kernel->signature.types[index] != type) { /* Check special case where signature is a specific scalar type. This can happen if the vxAddParameterToKernel() passes one of the scalar vx_type_e types instead of the more generic VX_TYPE_SCALAR since the spec doesn't specify that only VX_TYPE_SCALAR should be used for scalar types in this function. 
*/ if((type == VX_TYPE_SCALAR) && (vxQueryScalar((vx_scalar)value, VX_SCALAR_ATTRIBUTE_TYPE, &data_type, sizeof(data_type)) == VX_SUCCESS)) { if(data_type != node->kernel->signature.types[index]) { VX_PRINT(VX_ZONE_ERROR, "Invalid scalar type 0x%08x!\n", data_type); status = VX_ERROR_INVALID_TYPE; goto exit; } } else { VX_PRINT(VX_ZONE_ERROR, "Invalid type 0x%08x!\n", type); status = VX_ERROR_INVALID_TYPE; goto exit; } } if (node->parameters[index]) { if (node->parameters[index]->delay!=NULL) { // we already have a delay element here */ vx_bool res = vxRemoveAssociationToDelay(node->parameters[index], node, index); if (res == vx_false_e) { VX_PRINT(VX_ZONE_ERROR, "Internal error removing delay association\n"); status = VX_ERROR_INVALID_REFERENCE; goto exit; } } } if (value->delay!=NULL) { /* the new parameter is a delay element */ vx_bool res = vxAddAssociationToDelay(value, node, index); if (res == vx_false_e) { VX_PRINT(VX_ZONE_ERROR, "Internal error adding delay association\n"); status = VX_ERROR_INVALID_REFERENCE; goto exit; } } /* actual change of the node parameter */ vxNodeSetParameter(node, index, value); /* if the node has a child graph, find out which parameter is this */ if (node->child) { vx_uint32 p = 0; for (p = 0; p < node->child->numParams; p++) { if ((node->child->parameters[p].node == node) && (node->child->parameters[p].index == index)) { status = vxSetGraphParameterByIndex((vx_graph)node->child, p, value); break; } } } exit: if (status == VX_SUCCESS) { VX_PRINT(VX_ZONE_PARAMETER, "Assigned Node[%u] %p type:%08x ref="VX_FMT_REF"\n", index, node, type, value); } else { VX_PRINT(VX_ZONE_ERROR, "Specified: parameter[%u] type:%08x => "VX_FMT_REF"\n", index, type, value); VX_PRINT(VX_ZONE_ERROR, "Required: parameter[%u] dir:%d type:%08x\n", index, node->kernel->signature.directions[index], node->kernel->signature.types[index]); } return status; }