/* pygi_marshal_cleanup_args_from_py_parameter_fail:
 * @state: invoke state; marked failed before cleanup begins.
 * @cache: the callable's cache, providing per-argument cleanup funcs.
 * @failed_arg_index: index of the argument whose marshaling failed.
 *
 * Final cleanup path for the "in" (Python -> C) marshaling stage when
 * marshaling of one parameter fails mid-invoke.  Walks arguments
 * 0..failed_arg_index and releases whatever their from_py marshalers
 * already produced, so invoke can exit without leaking.
 */
void
pygi_marshal_cleanup_args_from_py_parameter_fail (PyGIInvokeState   *state,
                                                  PyGICallableCache *cache,
                                                  gssize             failed_arg_index)
{
    gssize i;

    state->failed = TRUE;

    for (i = 0; i < _pygi_callable_cache_args_len (cache) && i <= failed_arg_index; i++) {
        PyGIArgCache *arg_cache = _pygi_callable_cache_get_arg (cache, i);
        PyGIMarshalCleanupFunc cleanup_func = arg_cache->from_py_cleanup;
        gpointer data = state->args[i]->v_pointer;
        PyObject *py_arg = PyTuple_GET_ITEM (state->py_in_args,
                                             arg_cache->py_arg_index);

        /* Use a bitwise direction test (not equality) so bidirectional
         * (inout) arguments, whose direction is FROM_PYTHON | TO_PYTHON,
         * are also cleaned up here.  This matches the check used in
         * pygi_marshal_cleanup_args_from_py_marshal_success(); an
         * equality test silently leaked inout arguments on failure. */
        if (cleanup_func &&
                (arg_cache->direction & PYGI_DIRECTION_FROM_PYTHON) &&
                data != NULL) {
            /* The argument at failed_arg_index itself never finished
             * marshaling, so was_processed is FALSE for it. */
            cleanup_func (state, arg_cache, py_arg, data,
                          i < failed_arg_index);
        } else if (arg_cache->is_caller_allocates && data != NULL) {
            /* Caller-allocated out storage was set up during the in
             * stage and must be torn down on in-stage failure. */
            _cleanup_caller_allocates (state, arg_cache, py_arg, data, FALSE);
        }
    }
}
/**
 * Cleanup during invoke can happen in multiple
 * stages, each of which can be the result of a
 * successful completion of that stage or an error
 * occurring which requires partial cleanup.
 *
 * For the most part, either the C interface being
 * invoked or the python object which wraps the
 * parameters, handle their lifecycles but in some
 * cases, where we have intermediate objects,
 * or when we fail processing a parameter, we need
 * to handle the clean up manually.
 *
 * There are two argument processing stages.
 * They are the in stage, where we process python
 * parameters into their C counterparts, and the out
 * stage, where we process out C parameters back
 * into python objects. The in stage also sets up
 * temporary out structures for caller allocated
 * parameters which need to be cleaned up either on
 * in stage failure or at the completion of the out
 * stage (either success or failure)
 *
 * The in stage must call one of these cleanup functions:
 *   - pygi_marshal_cleanup_args_from_py_marshal_success
 *     (continue to out stage)
 *   - pygi_marshal_cleanup_args_from_py_parameter_fail
 *     (final, exit from invoke)
 *
 * The out stage must call one of these cleanup functions which are all final:
 *   - pygi_marshal_cleanup_args_to_py_marshal_success
 *   - pygi_marshal_cleanup_args_return_fail
 *   - pygi_marshal_cleanup_args_to_py_parameter_fail
 *
 **/

/* Cleanup after the "in" (Python -> C) stage marshaled every argument
 * successfully; releases per-argument cleanup_data and clears the slot
 * so later stages cannot double-free it. */
void
pygi_marshal_cleanup_args_from_py_marshal_success (PyGIInvokeState   *state,
                                                   PyGICallableCache *cache)
{
    gssize i;

    for (i = 0; i < _pygi_callable_cache_args_len (cache); i++) {
        PyGIArgCache *arg_cache = _pygi_callable_cache_get_arg (cache, i);
        PyGIMarshalCleanupFunc cleanup_func = arg_cache->from_py_cleanup;
        PyObject *py_arg = PyTuple_GET_ITEM (state->py_in_args,
                                             arg_cache->py_arg_index);
        gpointer cleanup_data = state->args_cleanup_data[i];

        /* Only cleanup using args_cleanup_data when available.
         * It is the responsibility of the various "from_py" marshalers to return
         * cleanup_data which is then passed into their respective cleanup function.
         * PyGIInvokeState.args_cleanup_data stores this data (via _invoke_marshal_in_args)
         * for the duration of the invoke up until this point. */
        if (cleanup_func && cleanup_data != NULL &&
                arg_cache->direction & PYGI_DIRECTION_FROM_PYTHON) {
            /* TRUE: the argument was fully marshaled/processed. */
            cleanup_func (state, arg_cache, py_arg, cleanup_data, TRUE);
            state->args_cleanup_data[i] = NULL;
        }
    }
}
/* Write zero/empty values into the closure's return slot and every
 * out-argument location, then — if the callable throws — route any
 * pending GError into a Python exception.  Run when the Python side of
 * a closure fails, so the C caller never observes uninitialized
 * out-values. */
static void
_pygi_closure_clear_retvals (PyGIInvokeState *state,
                             PyGICallableCache *cache,
                             gpointer resp)
{
    GIArgument zeroed = { 0, };
    gsize n_args = _pygi_callable_cache_args_len (cache);
    gsize idx;

    if (cache->return_cache->type_tag != GI_TYPE_TAG_VOID)
        _pygi_closure_assign_pyobj_to_retval (resp, &zeroed,
                                              cache->return_cache);

    for (idx = 0; idx < n_args; idx++) {
        PyGIArgCache *cur_cache = g_ptr_array_index (cache->args_cache, idx);

        if ((cur_cache->direction & PYGI_DIRECTION_FROM_PYTHON) == 0)
            continue;

        _pygi_closure_assign_pyobj_to_out_argument (
            state->args[idx].arg_pointer.v_pointer, &zeroed, cur_cache);
    }

    if (cache->throws) {
        /* The GError** always travels as the trailing argument. */
        GError **err_location =
            (GError **) state->args[state->n_args - 1].arg_value.v_pointer;

        if (err_location != NULL)
            pygi_gerror_exception_check (err_location);
    }
}
/* Prepare @state for dispatching a closure invocation described by
 * @closure_cache, converting the raw ffi @args into per-argument
 * storage.  Returns FALSE (with a Python exception set) on allocation
 * failure. */
static gboolean
_invoke_state_init_from_cache (PyGIInvokeState *state,
                               PyGIClosureCache *closure_cache,
                               void **args)
{
    PyGICallableCache *cache = (PyGICallableCache *) closure_cache;

    /* Python sees one arg per GI argument; the trailing GError** of a
     * throwing callable is C-side only, so fix n_py_in_args before
     * bumping n_args for it. */
    state->n_args = _pygi_callable_cache_args_len (cache);
    state->n_py_in_args = state->n_args;
    if (cache->throws)
        state->n_args++;

    state->py_in_args = PyTuple_New (state->n_py_in_args);
    if (state->py_in_args == NULL) {
        PyErr_NoMemory ();
        return FALSE;
    }

    state->args = NULL;
    state->error = NULL;

    if (!_pygi_invoke_arg_state_init (state))
        return FALSE;

    state->ffi_args = NULL;

    _pygi_closure_convert_ffi_arguments (state->args, cache, args);
    return TRUE;
}
/* Prepare @state for a normal function invoke: combine positional and
 * keyword Python arguments, allocate per-argument storage, and wire up
 * the trailing GError slot when the callable throws.  Returns FALSE
 * (with a Python exception set) on failure. */
static gboolean
_invoke_state_init_from_cache (PyGIInvokeState *state,
                               PyGIFunctionCache *function_cache,
                               PyObject *py_args,
                               PyObject *kwargs)
{
    PyGICallableCache *cache = (PyGICallableCache *) function_cache;

    state->n_args = _pygi_callable_cache_args_len (cache);
    if (cache->throws)
        state->n_args++;

    /* Vfunc invokes fill function_ptr in advance from the implementor's
     * GType; fall back to the cached native address otherwise. */
    if (state->function_ptr == NULL)
        state->function_ptr = function_cache->invoker.native_address;

    state->py_in_args =
        _py_args_combine_and_check_length (cache, py_args, kwargs);
    if (state->py_in_args == NULL)
        return FALSE;
    state->n_py_in_args = PyTuple_Size (state->py_in_args);

    if (!_pygi_invoke_arg_state_init (state))
        return FALSE;

    state->error = NULL;

    if (cache->throws) {
        gssize err_idx = state->n_args - 1;

        /* ffi must receive a GError*** so the callee can store into
         * state->error through two levels of indirection. */
        state->args[err_idx].arg_pointer.v_pointer = &state->error;
        state->ffi_args[err_idx] = &(state->args[err_idx].arg_pointer);
    }

    return TRUE;
}
/* pygi_invoke_marshal_in_args:
 *
 * Fills out the state struct argument lists. arg_values will always hold
 * actual values marshaled either to or from Python and C. arg_pointers will
 * hold pointers (via v_pointer) to auxiliary value storage. This will normally
 * point to values stored in arg_values. In the case of caller allocated
 * out args, arg_pointers[x].v_pointer will point to newly allocated memory.
 * arg_pointers inserts a level of pointer indirection between arg_values
 * and the argument list ffi receives when dealing with non-caller allocated
 * out arguments.
 *
 * For example:
 * [[
 *  void callee (int *i, int j) { *i = 50 - j; }
 *  void caller () {
 *    int i = 0;
 *    callee (&i, 8);
 *  }
 *
 *  args[0] == &arg_pointers[0];
 *  arg_pointers[0].v_pointer == &arg_values[0];
 *  arg_values[0].v_int == 42;
 *
 *  args[1] == &arg_values[1];
 *  arg_values[1].v_int == 8;
 * ]]
 *
 * Returns FALSE (with a Python TypeError set and already-marshaled args
 * cleaned up) on any marshaling failure.
 */
static gboolean
_invoke_marshal_in_args (PyGIInvokeState *state, PyGIFunctionCache *function_cache)
{
    PyGICallableCache *cache = (PyGICallableCache *) function_cache;
    gssize i;

    /* Too many Python arguments supplied for this callable. */
    if (state->n_py_in_args > cache->n_py_args) {
        char *full_name = pygi_callable_cache_get_full_name (cache);
        PyErr_Format (PyExc_TypeError,
                      "%s() takes exactly %zd argument(s) (%zd given)",
                      full_name,
                      cache->n_py_args,
                      state->n_py_in_args);
        g_free (full_name);
        return FALSE;
    }

    for (i = 0; i < _pygi_callable_cache_args_len (cache); i++) {
        GIArgument *c_arg = &state->args[i].arg_value;
        PyGIArgCache *arg_cache = g_ptr_array_index (cache->args_cache, i);
        PyObject *py_arg = NULL;

        switch (arg_cache->direction) {
            case PYGI_DIRECTION_FROM_PYTHON:
                /* The ffi argument points directly at memory in arg_values. */
                state->ffi_args[i] = c_arg;

                if (arg_cache->meta_type == PYGI_META_ARG_TYPE_CLOSURE) {
                    /* Closure user-data slot: filled from invoke state,
                     * never from a Python argument. */
                    state->ffi_args[i]->v_pointer = state->user_data;
                    continue;
                } else if (arg_cache->meta_type != PYGI_META_ARG_TYPE_PARENT)
                    /* Child args (e.g. array lengths) are filled by their
                     * parent's marshaler, not from Python. */
                    continue;

                /* Not enough Python arguments to satisfy this C arg. */
                if (arg_cache->py_arg_index >= state->n_py_in_args) {
                    char *full_name = pygi_callable_cache_get_full_name (cache);
                    PyErr_Format (PyExc_TypeError,
                                  "%s() takes exactly %zd argument(s) (%zd given)",
                                  full_name,
                                  cache->n_py_args,
                                  state->n_py_in_args);
                    g_free (full_name);

                    /* clean up all of the args we have already marshalled,
                     * since invoke will not be called */
                    pygi_marshal_cleanup_args_from_py_parameter_fail (state,
                                                                      cache,
                                                                      i);
                    return FALSE;
                }

                py_arg = PyTuple_GET_ITEM (state->py_in_args,
                                           arg_cache->py_arg_index);
                break;
            case PYGI_DIRECTION_BIDIRECTIONAL:
                /* Inout: fetch the Python value here, then fall through to
                 * set up the out-pointer plumbing shared with TO_PYTHON. */
                if (arg_cache->meta_type != PYGI_META_ARG_TYPE_CHILD) {
                    if (arg_cache->py_arg_index >= state->n_py_in_args) {
                        char *full_name = pygi_callable_cache_get_full_name (cache);
                        PyErr_Format (PyExc_TypeError,
                                      "%s() takes exactly %zd argument(s) (%zd given)",
                                      full_name,
                                      cache->n_py_args,
                                      state->n_py_in_args);
                        g_free (full_name);
                        pygi_marshal_cleanup_args_from_py_parameter_fail (state,
                                                                          cache,
                                                                          i);
                        return FALSE;
                    }
                    py_arg = PyTuple_GET_ITEM (state->py_in_args,
                                               arg_cache->py_arg_index);
                }
                /* Fall through */
            case PYGI_DIRECTION_TO_PYTHON:
                /* arg_pointers always stores a pointer to the data to be marshaled "to python"
                 * even in cases where arg_pointers is not being used as indirection between
                 * ffi and arg_values. This gives a guarantee that out argument marshaling
                 * (_invoke_marshal_out_args) can always rely on arg_pointers pointing to
                 * the correct chunk of memory to marshal. */
                state->args[i].arg_pointer.v_pointer = c_arg;

                if (arg_cache->is_caller_allocates) {
                    /* In the case of caller allocated out args, we don't use
                     * an extra level of indirection and state->args will point
                     * directly at the data to be marshaled. However, as noted
                     * above, arg_pointers will also point to this caller allocated
                     * chunk of memory used by out argument marshaling. */
                    state->ffi_args[i] = c_arg;

                    if (!_caller_alloc (arg_cache, c_arg)) {
                        char *full_name = pygi_callable_cache_get_full_name (cache);
                        PyErr_Format (PyExc_TypeError,
                                      "Could not caller allocate argument %zd of callable %s",
                                      i, full_name);
                        g_free (full_name);
                        pygi_marshal_cleanup_args_from_py_parameter_fail (state,
                                                                          cache,
                                                                          i);
                        return FALSE;
                    }
                } else {
                    /* Non-caller allocated out args will use arg_pointers as an
                     * extra level of indirection */
                    state->ffi_args[i] = &state->args[i].arg_pointer;
                }
                break;
        }

        if (py_arg == _PyGIDefaultArgPlaceholder) {
            /* Caller omitted this optional argument; use the cached default. */
            *c_arg = arg_cache->default_value;
        } else if (arg_cache->from_py_marshaller != NULL &&
                   arg_cache->meta_type != PYGI_META_ARG_TYPE_CHILD) {
            gboolean success;
            gpointer cleanup_data = NULL;

            if (!arg_cache->allow_none && py_arg == Py_None) {
                PyErr_Format (PyExc_TypeError,
                              "Argument %zd does not allow None as a value",
                              i);
                pygi_marshal_cleanup_args_from_py_parameter_fail (state,
                                                                  cache,
                                                                  i);
                return FALSE;
            }

            success = arg_cache->from_py_marshaller (state,
                                                     cache,
                                                     arg_cache,
                                                     py_arg,
                                                     c_arg,
                                                     &cleanup_data);
            /* Saved so the from_py cleanup funcs can release it later
             * (see pygi_marshal_cleanup_args_from_py_marshal_success). */
            state->args[i].arg_cleanup_data = cleanup_data;

            if (!success) {
                pygi_marshal_cleanup_args_from_py_parameter_fail (state,
                                                                  cache,
                                                                  i);
                return FALSE;
            }
        }
    }

    return TRUE;
}
/* Marshal the Python return value(s) of a closure back into the C
 * caller's return slot (@resp) and out-argument locations.  When the
 * callable has multiple outputs, @py_retval is a tuple consumed in
 * order via i_py_retval; otherwise the single object is used directly.
 * Returns FALSE (after cleanup) on any marshaling failure. */
static gboolean
_pygi_closure_set_out_arguments (PyGIInvokeState *state,
                                 PyGICallableCache *cache,
                                 PyObject *py_retval,
                                 void *resp)
{
    gssize i;
    gssize i_py_retval = 0;   /* index into the Python return tuple */
    gboolean success;

    if (cache->return_cache->type_tag != GI_TYPE_TAG_VOID) {
        PyObject *item = py_retval;

        /* With multiple outputs the real return value is element 0. */
        if (PyTuple_Check (py_retval)) {
            item = PyTuple_GET_ITEM (py_retval, 0);
        }

        success = cache->return_cache->from_py_marshaller (state,
                                                           cache,
                                                           cache->return_cache,
                                                           item,
                                                           &state->return_arg,
                                                           &state->args[0].arg_cleanup_data);
        if (!success) {
            pygi_marshal_cleanup_args_return_fail (state, cache);
            return FALSE;
        }

        _pygi_closure_assign_pyobj_to_retval (resp, &state->return_arg,
                                              cache->return_cache);
        i_py_retval++;
    }

    for (i = 0; i < _pygi_callable_cache_args_len (cache); i++) {
        PyGIArgCache *arg_cache = g_ptr_array_index (cache->args_cache, i);

        if (arg_cache->direction & PYGI_DIRECTION_FROM_PYTHON) {
            PyObject *item = py_retval;

            if (arg_cache->type_tag == GI_TYPE_TAG_ERROR) {
                /* An out GError is handled separately; just clear it. */
                * (GError **) state->args[i].arg_pointer.v_pointer = NULL;
                continue;
            }

            if (PyTuple_Check (py_retval)) {
                item = PyTuple_GET_ITEM (py_retval, i_py_retval);
            } else if (i_py_retval != 0) {
                /* More than one output expected but a non-tuple was
                 * returned: the extras cannot be satisfied. */
                pygi_marshal_cleanup_args_to_py_parameter_fail (state, cache,
                                                                i_py_retval);
                return FALSE;
            }

            /* NOTE(review): cleanup_data is stored at index i_py_retval
             * while the value/pointer slots use index i; the two diverge
             * when the callable has a non-void return or skipped args.
             * Looks suspicious — confirm against the cleanup funcs that
             * later read arg_cleanup_data before changing anything. */
            success = arg_cache->from_py_marshaller (state,
                                                     cache,
                                                     arg_cache,
                                                     item,
                                                     &state->args[i].arg_value,
                                                     &state->args[i_py_retval].arg_cleanup_data);
            if (!success) {
                pygi_marshal_cleanup_args_to_py_parameter_fail (state, cache,
                                                                i_py_retval);
                return FALSE;
            }

            _pygi_closure_assign_pyobj_to_out_argument (
                state->args[i].arg_pointer.v_pointer,
                &state->args[i].arg_value, arg_cache);

            i_py_retval++;
        }
    }

    return TRUE;
}
/* Convert the C arguments of a closure invocation into the Python
 * argument tuple (state->py_in_args) that will be passed to the Python
 * callable.  The user_data slot, when present, is spliced in as
 * variable trailing arguments.  The tuple is finally shrunk to the
 * number of Python-visible args actually produced.
 *
 * Returns FALSE (with a Python exception set) on failure.
 */
static gboolean
_pygi_closure_convert_arguments (PyGIInvokeState *state,
                                 PyGIClosureCache *closure_cache)
{
    PyGICallableCache *cache = (PyGICallableCache *) closure_cache;
    gssize n_in_args = 0;
    gssize i;

    for (i = 0; i < _pygi_callable_cache_args_len (cache); i++) {
        PyGIArgCache *arg_cache;

        arg_cache = g_ptr_array_index (cache->args_cache, i);

        if (arg_cache->direction & PYGI_DIRECTION_TO_PYTHON) {
            PyObject *value;

            if (cache->user_data_index == i) {
                if (state->user_data == NULL) {
                    /* user_data can be NULL for connect functions which don't accept
                     * user_data or as the default for user_data in the middle of function
                     * arguments.
                     */
                    Py_INCREF (Py_None);
                    value = Py_None;
                } else {
                    /* Extend the callbacks args with user_data as variable args. */
                    gssize j, user_data_len;
                    PyObject *py_user_data = state->user_data;

                    if (!PyTuple_Check (py_user_data)) {
                        PyErr_SetString (PyExc_TypeError,
                                         "expected tuple for callback user_data");
                        return FALSE;
                    }

                    user_data_len = PyTuple_Size (py_user_data);

                    /* BUGFIX: the resize result was previously ignored.
                     * On failure _PyTuple_Resize frees the tuple and sets
                     * *p to NULL, so the SET_ITEM calls below would write
                     * through a dangling/NULL tuple.  Bail out instead. */
                    if (_PyTuple_Resize (&state->py_in_args,
                                         state->n_py_in_args + user_data_len - 1) == -1)
                        return FALSE;

                    for (j = 0; j < user_data_len; j++, n_in_args++) {
                        value = PyTuple_GetItem (py_user_data, j);
                        Py_INCREF (value);
                        PyTuple_SET_ITEM (state->py_in_args, n_in_args, value);
                    }

                    /* We can assume user_data args are never going to be inout,
                     * so just continue here.
                     */
                    continue;
                }
            } else if (arg_cache->meta_type != PYGI_META_ARG_TYPE_PARENT) {
                /* Child args (array lengths, etc.) are not exposed to
                 * Python. */
                continue;
            } else {
                value = arg_cache->to_py_marshaller (state,
                                                     cache,
                                                     arg_cache,
                                                     &state->args[i].arg_value);
                if (value == NULL) {
                    pygi_marshal_cleanup_args_to_py_parameter_fail (state,
                                                                    cache,
                                                                    i);
                    return FALSE;
                }
            }

            PyTuple_SET_ITEM (state->py_in_args, n_in_args, value);
            n_in_args++;
        }
    }

    /* Shrink the tuple to the number of slots actually filled. */
    if (_PyTuple_Resize (&state->py_in_args, n_in_args) == -1)
        return FALSE;

    return TRUE;
}
/* Copy the raw ffi argument values delivered to a closure into the
 * per-argument GIArgument storage in @state.
 *
 * For arguments marshaled "from Python" (out/inout from the closure's
 * perspective) ffi hands us a pointer to the caller's storage: keep
 * that pointer in arg_pointer and snapshot the pointed-at value into
 * arg_value.  Plain in arguments are dereferenced with the width
 * dictated by their type tag.  The trailing GError** slot of a
 * throwing callable is copied last.
 */
static void
_pygi_closure_convert_ffi_arguments (PyGIInvokeArgState *state,
                                     PyGICallableCache *cache,
                                     void **args)
{
    gint i;

    for (i = 0; i < _pygi_callable_cache_args_len (cache); i++) {
        PyGIArgCache *arg_cache = g_ptr_array_index (cache->args_cache, i);

        if (arg_cache->direction & PYGI_DIRECTION_FROM_PYTHON) {
            state[i].arg_value.v_pointer = * (gpointer *) args[i];

            if (state[i].arg_value.v_pointer == NULL)
                continue;

            state[i].arg_pointer.v_pointer = state[i].arg_value.v_pointer;
            state[i].arg_value = *(GIArgument *) state[i].arg_value.v_pointer;
            continue;
        }

        switch (arg_cache->type_tag) {
            case GI_TYPE_TAG_BOOLEAN:
                state[i].arg_value.v_boolean = * (gboolean *) args[i];
                break;
            case GI_TYPE_TAG_INT8:
                state[i].arg_value.v_int8 = * (gint8 *) args[i];
                break;
            case GI_TYPE_TAG_UINT8:
                state[i].arg_value.v_uint8 = * (guint8 *) args[i];
                break;
            case GI_TYPE_TAG_INT16:
                state[i].arg_value.v_int16 = * (gint16 *) args[i];
                break;
            case GI_TYPE_TAG_UINT16:
                state[i].arg_value.v_uint16 = * (guint16 *) args[i];
                break;
            case GI_TYPE_TAG_INT32:
                state[i].arg_value.v_int32 = * (gint32 *) args[i];
                break;
            case GI_TYPE_TAG_UINT32:
                state[i].arg_value.v_uint32 = * (guint32 *) args[i];
                break;
            case GI_TYPE_TAG_INT64:
                /* BUGFIX: read the full 64 bits.  Casting through glong
                 * truncated these values on platforms where glong is
                 * 32 bits (e.g. 32-bit Linux, Windows LLP64). */
                state[i].arg_value.v_int64 = * (gint64 *) args[i];
                break;
            case GI_TYPE_TAG_UINT64:
                /* BUGFIX: as above, and avoid sign-extending through a
                 * signed type for the unsigned case. */
                state[i].arg_value.v_uint64 = * (guint64 *) args[i];
                break;
            case GI_TYPE_TAG_FLOAT:
                state[i].arg_value.v_float = * (gfloat *) args[i];
                break;
            case GI_TYPE_TAG_DOUBLE:
                state[i].arg_value.v_double = * (gdouble *) args[i];
                break;
            case GI_TYPE_TAG_UTF8:
                state[i].arg_value.v_string = * (gchar **) args[i];
                break;
            case GI_TYPE_TAG_INTERFACE:
            {
                GIBaseInfo *interface;
                GIInfoType interface_type;

                interface = ((PyGIInterfaceCache *) arg_cache)->interface_info;
                interface_type = g_base_info_get_type (interface);

                if (interface_type == GI_INFO_TYPE_ENUM) {
                    state[i].arg_value.v_int = * (gint *) args[i];
                } else if (interface_type == GI_INFO_TYPE_FLAGS) {
                    state[i].arg_value.v_uint = * (guint *) args[i];
                } else {
                    /* Objects, structs, etc. arrive as pointers. */
                    state[i].arg_value.v_pointer = * (gpointer *) args[i];
                }
                break;
            }
            case GI_TYPE_TAG_ERROR:
            case GI_TYPE_TAG_GHASH:
            case GI_TYPE_TAG_GLIST:
            case GI_TYPE_TAG_GSLIST:
            case GI_TYPE_TAG_ARRAY:
            case GI_TYPE_TAG_VOID:
                state[i].arg_value.v_pointer = * (gpointer *) args[i];
                break;
            default:
                g_warning ("Unhandled type tag %s",
                           g_type_tag_to_string (arg_cache->type_tag));
                state[i].arg_value.v_pointer = 0;
        }
    }

    if (cache->throws) {
        gssize error_index = _pygi_callable_cache_args_len (cache);

        state[error_index].arg_value.v_pointer = * (gpointer *) args[error_index];
    }
}