/*
 * PongoDict.get(key, default=None, sep=".")
 *
 * Look up `key` in the dictionary.  A string/unicode key is split on `sep`
 * and treated as a path; a list-typed db key goes through db_multi(GET),
 * anything else through dbobject_getitem.  Returns the value (as a proxy),
 * or `default` when the key is missing.  Since `dflt` defaults to Py_None
 * (non-NULL), KeyError is only raised if the caller explicitly passed a
 * NULL default — in practice get() returns None for missing keys.
 */
static PyObject *
PongoDict_get(PongoDict *self, PyObject *args, PyObject *kwargs)
{
    PyObject *key, *dflt = Py_None;
    PyObject *klist = NULL;
    PyObject *ret = NULL;
    dbtype_t k, v;
    char *sep = ".";
    char *kwlist[] = {"key", "default", "sep", NULL};
    int r;

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|Os:get", kwlist,
                &key, &dflt, &sep))
        return NULL;
    dblock(self->ctx);
    if (PyString_Check(key) || PyUnicode_Check(key)) {
        /* String keys are paths: split into components, convert the list */
        klist = PyObject_CallMethod(key, "split", "s", sep);
        k = from_python(self->ctx, klist);
        Py_XDECREF(klist);
    } else {
        k = from_python(self->ctx, key);
    }
    if (!PyErr_Occurred()) {
        if (dbtype(self->ctx, k) == List) {
            /* Path lookup: walk the key components with db_multi */
            r = db_multi(SELF_CTX_AND_DBPTR, k, multi_GET, &v, 0);
            if (r == 0) {
                ret = to_python(self->ctx, v, TP_PROXY);
            } else if (dflt) {
                Py_INCREF(dflt);
                ret = dflt;
            } else {
                PyErr_SetObject(PyExc_KeyError, key);
            }
        } else if (dbobject_getitem(SELF_CTX_AND_DBPTR, k, &v) == 0) {
            /* Simple (non-path) key lookup succeeded */
            ret = to_python(self->ctx, v, TP_PROXY);
        } else {
            if (dflt) {
                ret = dflt;
                Py_INCREF(ret);
            } else {
                PyErr_SetObject(PyExc_KeyError, key);
            }
        }
    }
    dbunlock(self->ctx);
    return ret;
}
/*
 * PongoDict.pop(key, default=None, sync=ctx->sync)
 *
 * Remove `key` from the dictionary and return its former value (as a
 * proxy).  If the key is absent, return `default` when one was supplied,
 * otherwise raise KeyError.
 */
static PyObject *
PongoDict_pop(PongoDict *self, PyObject *args, PyObject *kwargs)
{
    PyObject *key, *dflt = NULL;
    PyObject *result = NULL;
    dbtype_t dbkey, dbval;
    int sync = self->ctx->sync;
    char *kwlist[] = {"key", "default", "sync", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|Oi:pop", kwlist,
                &key, &dflt, &sync))
        return NULL;

    dblock(self->ctx);
    dbkey = from_python(self->ctx, key);
    if (!PyErr_Occurred()) {
        if (dbobject_delitem(SELF_CTX_AND_DBPTR, dbkey, &dbval, sync) >= 0) {
            /* Deleted — hand the removed value back to the caller */
            result = to_python(self->ctx, dbval, TP_PROXY);
        } else if (dflt) {
            Py_INCREF(dflt);
            result = dflt;
        } else {
            PyErr_SetObject(PyExc_KeyError, key);
        }
    }
    dbunlock(self->ctx);
    return result;
}
std::string add(const std::string& str_a, const std::string& str_b) { std_msgs::Int64 a = from_python<std_msgs::Int64>(str_a); std_msgs::Int64 b = from_python<std_msgs::Int64>(str_b); std_msgs::Int64 sum = AddTwoInts::add(a, b); return to_python(sum); }
/*
 * PongoDict.search(path, relop, value)
 *
 * Search the dictionary for entries at `path` matching `relop value`,
 * where relop is one of "==", "!=", "<", "<=", ">", ">=" (or the two-letter
 * forms "eq", "ne", "lt", "le", "gt", "ge").  A string path is split on "."
 * into components; otherwise it must already be a sequence.  Returns a dict
 * of matches (children proxied), or raises ValueError/TypeError on bad
 * arguments.
 *
 * Fixes vs. the original:
 *  - accept unicode paths as well as str, consistent with get()/set()
 *  - NULL-check the result of str.split()
 *  - don't leak the split list when the sequence check fails
 */
static PyObject *
PongoDict_search(PongoDict *self, PyObject *args)
{
    PyObject *path, *value, *ret = NULL;
    dbtype_t dbpath, dbvalue, dbrslt;
    char *rel;
    char *sep = ".";
    int decpath = 0;
    relop_t relop;

    if (!PyArg_ParseTuple(args, "OsO:search", &path, &rel, &value))
        return NULL;

    /* Map the relational-operator string to its enum */
    if (!strcmp(rel, "==") || !strcmp(rel, "eq")) {
        relop = db_EQ;
    } else if (!strcmp(rel, "!=") || !strcmp(rel, "ne")) {
        relop = db_NE;
    } else if (!strcmp(rel, "<") || !strcmp(rel, "lt")) {
        relop = db_LT;
    } else if (!strcmp(rel, "<=") || !strcmp(rel, "le")) {
        relop = db_LE;
    } else if (!strcmp(rel, ">") || !strcmp(rel, "gt")) {
        relop = db_GT;
    } else if (!strcmp(rel, ">=") || !strcmp(rel, "ge")) {
        relop = db_GE;
    } else {
        PyErr_Format(PyExc_ValueError, "Unknown relop '%s'", rel);
        return NULL;
    }

    /* String paths become component lists (match get/set behavior) */
    if (PyString_Check(path) || PyUnicode_Check(path)) {
        path = PyObject_CallMethod(path, "split", "s", sep);
        if (path == NULL)
            return NULL;
        decpath = 1;
    }
    if (!PySequence_Check(path)) {
        if (decpath)
            Py_DECREF(path);
        PyErr_Format(PyExc_TypeError, "path must be a sequence");
        return NULL;
    }
    dblock(self->ctx);
    dbpath = from_python(self->ctx, path);
    if (decpath)
        Py_DECREF(path);
    if (dbtype(self->ctx, dbpath) == List) {
        dbvalue = from_python(self->ctx, value);
        if (!PyErr_Occurred()) {
            dbrslt = dbcollection_new(self->ctx, 0);
            db_search(SELF_CTX_AND_DBPTR, dbpath, -1, relop, dbvalue, dbrslt);
            // PROXYCHLD means turn the root object into a real dict, but
            // create proxy objects for all children.
            ret = to_python(self->ctx, dbrslt, TP_PROXYCHLD);
        }
    } else {
        PyErr_Format(PyExc_Exception, "path type isn't List (%d)",
                dbtype(self->ctx, dbpath));
    }
    dbunlock(self->ctx);
    return ret;
}
/*
 * PongoDict.native()
 *
 * Materialize the whole dictionary as plain Python objects (flags=0,
 * so no proxies are created anywhere in the tree).
 */
static PyObject *
PongoDict_native(PongoDict *self)
{
    PyObject *result;

    dblock(self->ctx);
    result = to_python(SELF_CTX_AND_DBPTR, 0);
    dbunlock(self->ctx);
    return result;
}
// Invoke the wrapped callable: positional args and keywords are converted
// to their C++ types and the result converted back to Python.
PyObject* do_call(PyObject* args, PyObject* keywords) const {
    // Borrow the caller's keyword dict (taking a reference), or make an
    // empty one so downstream conversion always sees a dict.
    ref kw = keywords ? ref(keywords, ref::increment_count)
                      : ref(PyDict_New());
    return to_python(
        (*m_pf)(from_python(args, boost::python::type<Args>()),
                from_python(kw.get(), boost::python::type<Keywords>())));
}
// Read-accessor thunk: unpack the single "self" argument, convert it to a
// ClassType pointer, and return the pointed-to member (m_pm is a
// pointer-to-member) converted back to a Python object.
PyObject* getter_function<ClassType, MemberType>::do_call(
    PyObject* args, PyObject* /* keywords */) const
{
    PyObject* self;
    // Expect exactly one positional argument: the instance
    if (!PyArg_ParseTuple(args, const_cast<char*>("O"), &self))
        return 0;
    return to_python(
        from_python(self, type<const ClassType*>())->*m_pm);
}
/*
 * PongoDict.set(key, value, sep=".", sync=ctx->sync, fail=False)
 *
 * Store `value` at `key`.  A string/unicode key is split on `sep` into a
 * path; the special `pongo_id` key requests auto-generation of "_id"
 * (PUT_ID).  With fail=True a path store fails instead of overwriting.
 * Returns None, or the newly generated "_id" value in the PUT_ID case.
 *
 * Fix vs. the original: the trailing Py_XINCREF(ret) added a second
 * reference to the object returned by to_python() (already a new
 * reference), leaking it on every PUT_ID store.  References are now
 * taken per-branch instead.
 */
static PyObject *
PongoDict_set(PongoDict *self, PyObject *args, PyObject *kwargs)
{
    PyObject *key, *value;
    PyObject *klist = NULL;
    PyObject *ret = NULL;
    dbtype_t k, v;
    int sync = self->ctx->sync;
    int fail = 0;
    multi_t op = multi_SET;
    char *kwlist[] = {"key", "value", "sep", "sync", "fail", NULL};
    char *sep = ".";

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|sii:set", kwlist,
                &key, &value, &sep, &sync, &fail))
        return NULL;

    k = DBNULL;
    dblock(self->ctx);
    if (PyString_Check(key) || PyUnicode_Check(key)) {
        /* String keys are paths: split into components */
        klist = PyObject_CallMethod(key, "split", "s", sep);
        k = from_python(self->ctx, klist);
        Py_XDECREF(klist);
    } else if (key == pongo_id) {
        /* Sentinel key: ask db_multi to generate and store an "_id" */
        sync |= PUT_ID;
    } else {
        k = from_python(self->ctx, key);
    }
    v = from_python(self->ctx, value);
    if (!PyErr_Occurred()) {
        if (dbtype(self->ctx, k) == List) {
            if (fail)
                op = multi_SET_OR_FAIL;
            if (db_multi(SELF_CTX_AND_DBPTR, k, op, &v, sync) == 0) {
                Py_INCREF(Py_None);
                ret = Py_None;
            } else {
                PyErr_SetObject(PyExc_KeyError, key);
            }
        } else if (db_multi(SELF_CTX_AND_DBPTR, k, op, &v, sync) == 0) {
            // db_multi will tell us the newly created value of
            // "_id" when PUT_ID is enabled.
            if (sync & PUT_ID) {
                ret = to_python(self->ctx, v, TP_PROXY);  /* new reference */
            } else {
                Py_INCREF(Py_None);
                ret = Py_None;
            }
        } else if (sync & PUT_ID) {
            PyErr_Format(PyExc_ValueError, "value must be a dictionary");
        } else {
            PyErr_SetObject(PyExc_KeyError, key);
        }
    }
    dbunlock(self->ctx);
    return ret;
}
/*
 * pongo.meta(db, key[, value])
 *
 * Read (and optionally write) a database metadata setting.  Known keys:
 * "chunksize", "id", ".sync", ".uuid_class", ".uuid_constructor" (when
 * built with WANT_UUID_TYPE), and ".newkey".  The previous value is
 * returned; an unknown key raises an exception.
 *
 * NOTE(review): the ".uuid_class", ".uuid_constructor" and ".newkey"
 * branches return borrowed references without Py_INCREF, and the stored
 * `value` for those keys is not increfed either — looks like a refcount
 * bug; confirm against callers before changing.
 */
static PyObject *
pongo_meta(PyObject *self, PyObject *args)
{
    PongoCollection *data;
    pgctx_t *ctx;
    const char *key;
    PyObject *value = NULL, *ret = Py_None;

    if (!PyArg_ParseTuple(args, "Os|O:meta", &data, &key, &value))
        return NULL;
    if (pongo_check(data))
        return NULL;

    ctx = data->ctx;
    dblock(ctx);
    if (!strcmp(key, "chunksize")) {
        /* Read first, then write if a non-None value was supplied */
        ret = PyLong_FromLongLong(ctx->root->meta.chunksize);
        if (value && value != Py_None)
            ctx->root->meta.chunksize = PyInt_AsLong(value);
    } else if (!strcmp(key, "id")) {
        ret = to_python(ctx, ctx->root->meta.id, 0);
        if (value)
            ctx->root->meta.id = from_python(ctx, value);
    } else if (!strcmp(key, ".sync")) {
        ret = PyInt_FromLong(ctx->sync);
        if (value && value != Py_None)
            ctx->sync = PyInt_AsLong(value);
#ifdef WANT_UUID_TYPE
    } else if (!strcmp(key, ".uuid_class")) {
        ret = (PyObject*)uuid_class;
        if (value) uuid_class = (PyTypeObject*)value;
    } else if (!strcmp(key, ".uuid_constructor")) {
        ret = (PyObject*)uuid_constructor;
        if (value) uuid_constructor = value;
#endif
    } else if (!strcmp(key, ".newkey")) {
        ret = pongo_newkey;
        if (value) {
            pongo_newkey = value;
            if (value == Py_None) {
                /* None restores the built-in key generator */
#ifdef WANT_UUID_TYPE
                ctx->newkey = _newkey;
#else
                ctx->newkey = NULL;
#endif
            } else {
                /* A callable is invoked through the helper trampoline */
                ctx->newkey = pongo_newkey_helper;
            }
        }
    } else {
        PyErr_Format(PyExc_Exception, "Unknown meta key %s", key);
        ret = NULL;
    }
    dbunlock(ctx);
    return ret;
}
/*
 * Trampoline installed as ctx->newkey when the user registers a Python
 * key-generator via meta(".newkey").  Converts the candidate value to
 * Python, calls the registered callable, and converts the result back.
 * Returns DBNULL if the call fails (leaving the Python error set).
 */
// FIXME: something wrong here.
static dbtype_t
pongo_newkey_helper(pgctx_t *ctx, dbtype_t value)
{
    PyObject *result = PyObject_CallFunction(pongo_newkey, "(N)",
            to_python(ctx, value, 1));
    if (result == NULL)
        return DBNULL;
    value = from_python(ctx, result);
    Py_DECREF(result);
    return value;
}
/*
 * PongoDict.items()
 *
 * Build a Python list of (key, value) tuples by walking the underlying
 * _obj_t array directly.  Slots with an all-zero key are skipped.
 * Keys and values come back as proxies (TP_PROXY).
 */
static PyObject *
PongoDict_items(PongoDict *self)
{
    dbtype_t db;
    _obj_t *obj;
    PyObject *ret = PyList_New(0);
    PyObject *item, *k, *v;
    int i;

    dblock(self->ctx);
    /* Resolve the dict header, then its item array */
    db.ptr = dbptr(self->ctx, self->dbptr);
    obj = dbptr(self->ctx, db.ptr->obj);
    for(i=0; i<obj->len; i++) {
        // FIXME: NULL is a valid key?
        if (obj->item[i].key.all) {
            k = to_python(self->ctx, obj->item[i].key, TP_PROXY);
            v = to_python(self->ctx, obj->item[i].value, TP_PROXY);
            /* Py_BuildValue "(OO)" takes its own references to k and v */
            item = Py_BuildValue("(OO)", k, v);
            PyList_Append(ret, item);
            Py_DECREF(k);
            Py_DECREF(v);
            Py_DECREF(item);
        }
    }
    dbunlock(self->ctx);
    return ret;
}
/*
 * bonsai_foreach callback used by to_python() when flattening a
 * Collection into a real dict.  Each visited node is converted to a
 * (key, value) tuple (the helper's flags include TP_NODEKEY|TP_NODEVAL)
 * and inserted into the accumulator dict h->ob.
 */
static void
to_python_helper(pgctx_t *ctx, dbtype_t node, void *user)
{
    tphelper_t *h = (tphelper_t*)user;
    PyObject *kv;

    if (h->type == Collection || h->type == MultiCollection) {
        kv = to_python(ctx, node, h->flags);
        /* kv is a 2-tuple; SetItem takes its own references to both items */
        PyDict_SetItem(h->ob, PyTuple_GET_ITEM(kv, 0), PyTuple_GET_ITEM(kv, 1));
        Py_DECREF(kv);
    } else {
        //FIXME: exception
    }
}
/*
 * PongoDict.create(collection)
 *
 * Allocate a brand-new, empty dictionary in the same database context
 * as `arg` and return it as a proxy object.
 */
static PyObject *
PongoDict_create(PyObject *self, PyObject *arg)
{
    PongoCollection *ref = (PongoCollection*)arg;
    dbtype_t dict;
    PyObject *proxy;

    if (pongo_check(ref))
        return NULL;

    dblock(ref->ctx);
    dict = dbobject_new(ref->ctx);
    proxy = to_python(ref->ctx, dict, TP_PROXY);
    dbunlock(ref->ctx);
    return proxy;
}
/*
 * Mapping-protocol __getitem__: convert the Python key, look it up, and
 * return the value as a proxy.  Raises KeyError when the key is absent.
 */
static PyObject *
PongoDict_GetItem(PongoDict *self, PyObject *key)
{
    dbtype_t dbkey, dbval;
    PyObject *result = NULL;

    dblock(self->ctx);
    dbkey = from_python(self->ctx, key);
    if (!PyErr_Occurred()) {
        if (dbobject_getitem(SELF_CTX_AND_DBPTR, dbkey, &dbval) != 0) {
            PyErr_SetObject(PyExc_KeyError, key);
        } else {
            result = to_python(self->ctx, dbval, TP_PROXY);
        }
    }
    dbunlock(self->ctx);
    return result;
}
/*
 * pongo._object(db, offset)
 *
 * Debugging helper: reinterpret a raw 64-bit database offset as a
 * dbtype_t and convert it to a Python object (proxies enabled).
 */
static PyObject *
pongo__object(PyObject *self, PyObject *args)
{
    PongoCollection *data;
    uint64_t offset;
    dbtype_t db;
    PyObject *result;

    if (!PyArg_ParseTuple(args, "OL:_object", &data, &offset))
        return NULL;
    if (pongo_check(data))
        return NULL;

    db.all = offset;
    dblock(data->ctx);
    result = to_python(data->ctx, db, 1);
    dbunlock(data->ctx);
    return result;
}
// Convert an enumerator to Python by widening it to long and delegating
// to the long overload of to_python.
friend PyObject* to_python(EnumType x) { return to_python(static_cast<long>(x)); }
/*
 * tp_iternext for PongoIter over Lists, Objects and Collections.
 *
 * Lists/Objects iterate by index (self->pos/self->len).  Collections are
 * Bonsai trees walked in-order with an explicit stack (self->stack /
 * self->depth); tree nodes come back as (key, value) tuples via
 * TP_NODEKEY|TP_NODEVAL.  Raises StopIteration when exhausted.
 */
static PyObject *
PongoIter_next_item(PyObject *ob)
{
    PongoIter *self = (PongoIter*)ob;
    dbval_t *internal, *pn;
    dbtype_t node;
    PyObject *ret=NULL, *k, *v;
    _list_t *list;
    _obj_t *obj;

    dblock(self->ctx);
    internal = dbptr(self->ctx, self->dbptr);
    if (!internal) {
        // The {List,Object,Collection} internal pointer is NULL, so
        // there is no iteration to do
        PyErr_SetNone(PyExc_StopIteration);
    } else if (self->tag == List) {
        list = (_list_t*)internal;
        if (self->pos < self->len) {
            ret = to_python(self->ctx, list->item[self->pos], TP_PROXY);
            self->pos++;
        } else {
            PyErr_SetNone(PyExc_StopIteration);
        }
    } else if (self->tag == Object) {
        obj = (_obj_t*)internal;
        if (self->pos < self->len) {
            /* Objects yield (key, value) tuples */
            k = to_python(self->ctx, obj->item[self->pos].key, TP_PROXY);
            v = to_python(self->ctx, obj->item[self->pos].value, TP_PROXY);
            ret = PyTuple_Pack(2, k, v);
            self->pos++;
        } else {
            PyErr_SetNone(PyExc_StopIteration);
        }
    } else if (self->tag == Collection || self->tag == MultiCollection) {
        if (self->pos && self->depth == -1) {
            /* Tree walk started earlier (pos!=0) and stack is now empty */
            PyErr_SetNone(PyExc_StopIteration);
        } else {
            // NOTE: I'm overloading the lowest bit to mean "already traversed the left
            // side".  Normally, this bit is part of the 'type' field and would encode
            // this value as "Int".  However, the BonsaiNode left/right can only point
            // to other BonsaiNodes and the stack (where the bit overloading is happening)
            // lives only in main memory, so we'll never write this bit modification
            // to disk.

            // If were at position 0, put the internal node onto the stack
            // I'm reusing pos as a flag since pos is otherwise unused by the tree iterator
            if (self->pos == 0) {
                self->stack[++self->depth] = self->dbptr;
                self->pos = 1;
            }

            // Get current top of stack.  If we've already traversed the left side
            // of this node, go directly to the emit stage and traverse the right side.
            node = self->stack[self->depth];
            pn = _ptr(self->ctx, node.all & ~1);
            if (node.all & 1) {
                node.all &= ~1;
            } else {
                // Walk as far left as possible, pushing onto stack as we
                // follow each link
                while (pn->left.all) {
                    node = pn->left;
                    self->stack[++self->depth] = node;
                    pn = _ptr(self->ctx, node.all);
                }
            }
            // Now node, pn and top of stack all reference the same object,
            // so convert the object to python, pop the top of stack and
            // mark the new top as "left side traversed"
            ret = to_python(self->ctx, node, TP_NODEKEY|TP_NODEVAL|TP_PROXY);
            if (--self->depth >= 0) {
                self->stack[self->depth].all |= 1;
            }

            // Now check if there is a right branch in the tree and push
            // it for the next call to the iterator
            if (pn->right.all) {
                self->stack[++self->depth] = pn->right;
            }
        }
    }
    dbunlock(self->ctx);
    return ret;
}
/*
 * tp_iternext for a range-filtered PongoIter.
 *
 * Like PongoIter_next_item, but only yields items whose key/value falls
 * between self->lhdata and self->rhdata.  lhex/rhex select inclusive vs.
 * exclusive bounds (dbcmp(...) >= lhex / <= rhex, with lhex/rhex being
 * 0 or 1); an all-zero bound means "unbounded" on that side.  For
 * Collections the Bonsai tree walk prunes subtrees that cannot satisfy
 * the bounds.  Raises StopIteration when exhausted.
 */
static PyObject *
PongoIter_next_expr(PyObject *ob)
{
    PongoIter *self = (PongoIter*)ob;
    dbval_t *internal, *pn;
    dbtype_t node, key, val;
    PyObject *ret=NULL, *k, *v;
    int ls, rs;
    _list_t *list;
    _obj_t *obj;

    dblock(self->ctx);
    internal = dbptr(self->ctx, self->dbptr);
    if (!internal) {
        PyErr_SetNone(PyExc_StopIteration);
    } else if (self->tag == List) {
        list = (_list_t*)internal;
        /* Linear scan: skip items outside [lhdata, rhdata] */
        for(;;) {
            if (self->pos == self->len) {
                PyErr_SetNone(PyExc_StopIteration);
                break;
            }
            node = list->item[self->pos++];
            if ((!self->lhdata.all || dbcmp(self->ctx, node, self->lhdata) >= self->lhex) &&
                (!self->rhdata.all || dbcmp(self->ctx, node, self->rhdata) <= self->rhex)) {
                ret = to_python(self->ctx, node, TP_PROXY);
                break;
            }
        }
    } else if (self->tag == Object) {
        obj = (_obj_t*)internal;
        for(;;) {
            // If we've reached the end, quit with StopIteration
            if (self->pos == self->len) {
                PyErr_SetNone(PyExc_StopIteration);
                break;
            }
            key = obj->item[self->pos].key;
            val = obj->item[self->pos].value;
            self->pos++;
            // If the key doesn't satisfy the RHS, and since Objects are
            // sorted, we can quit with StopIteration
            if (!(!self->rhdata.all || dbcmp(self->ctx, key, self->rhdata) <= self->rhex)) {
                PyErr_SetNone(PyExc_StopIteration);
                break;
            }
            // If the key does satisfy the LHS, return it
            if (!self->lhdata.all || dbcmp(self->ctx, key, self->lhdata) >= self->lhex) {
                k = to_python(self->ctx, key, TP_PROXY);
                v = to_python(self->ctx, val, TP_PROXY);
                ret = PyTuple_Pack(2, k, v);
                break;
            }
        }
    } else if (self->tag == Collection || self->tag == MultiCollection) {
        if (self->pos && self->depth == -1) {
            PyErr_SetNone(PyExc_StopIteration);
        } else {
            // NOTE: I'm overloading the lowest bit to mean "already traversed the left
            // side".  Normally, this bit is part of the 'type' field and would encode
            // this value as "Int".  However, the BonsaiNode left/right can only point
            // to other BonsaiNodes and the stack (where the bit overloading is happening)
            // lives only in main memory, so we'll never write this bit modification
            // to disk.

            // If were at position 0, put the internal node onto the stack
            // I'm reusing pos as a flag since pos is otherwise unused by the tree iterator
            if (self->pos == 0) {
                /* Descend from the root to the first in-bounds node */
                node = self->dbptr;
                for(;;) {
                    pn = _ptr(self->ctx, node.all);
                    ls = (!self->lhdata.all || dbcmp(self->ctx, pn->key, self->lhdata) >= self->lhex);
                    rs = (!self->rhdata.all || dbcmp(self->ctx, pn->key, self->rhdata) <= self->rhex);
                    if (ls && rs) {
                        self->stack[++self->depth] = node;
                        self->pos = 1;
                        break;
                    } else if (ls && pn->left.all) {
                        node = pn->left;
                    } else if (rs && pn->right.all) {
                        node = pn->right;
                    } else {
                        /* No node satisfies both bounds */
                        PyErr_SetNone(PyExc_StopIteration);
                        goto exitproc;
                    }
                }
            }
            // Get current top of stack.  If we've already traversed the left side
            // of this node, go directly to the emit stage and traverse the right side.
            node = self->stack[self->depth];
            pn = _ptr(self->ctx, node.all & ~1);
            if (node.all & 1) {
                node.all &= ~1;
            } else {
                // Walk as far left as possible, pushing onto stack as we
                // follow each link
                if (pn->left.all) {
                    node = pn->left;
                    for(;;) {
                        pn = _ptr(self->ctx, node.all);
                        ls = (!self->lhdata.all || dbcmp(self->ctx, pn->key, self->lhdata) >= self->lhex);
                        rs = (!self->rhdata.all || dbcmp(self->ctx, pn->key, self->rhdata) <= self->rhex);
                        if (ls && rs) {
                            self->stack[++self->depth] = node;
                        }
                        if (ls && pn->left.all) {
                            node = pn->left;
                        } else if (rs && pn->right.all) {
                            node = pn->right;
                        } else {
                            break;
                        }
                    }
                    // Reset node and pn to whatever is on the top of stack now
                    node = self->stack[self->depth];
                    pn = _ptr(self->ctx, node.all);
                }
            }
            // Now node, pn and top of stack all reference the same object,
            // so convert the object to python, pop the top of stack and
            // mark the new top as "left side traversed"
            ret = to_python(self->ctx, node, TP_NODEKEY|TP_NODEVAL|TP_PROXY);
            if (--self->depth >= 0) {
                self->stack[self->depth].all |= 1;
            }

            // Now check if there is a right branch in the tree and push
            // it for the next call to the iterator
            if (pn->right.all) {
                node = pn->right;
                /* Push only in-bounds nodes of the right subtree */
                for(;;) {
                    pn = _ptr(self->ctx, node.all);
                    ls = (!self->lhdata.all || dbcmp(self->ctx, pn->key, self->lhdata) >= self->lhex);
                    rs = (!self->rhdata.all || dbcmp(self->ctx, pn->key, self->rhdata) <= self->rhex);
                    if (ls && rs) {
                        self->stack[++self->depth] = node;
                    }
                    if (ls && pn->left.all) {
                        node = pn->left;
                    } else if (rs && pn->right.all) {
                        node = pn->right;
                    } else {
                        break;
                    }
                }
            }
        }
    }
exitproc:
    dbunlock(self->ctx);
    return ret;
}
// Convert a Cylinder to Python by wrapping it in an analytic implicit
// surface and delegating to that overload of to_python.
PyObject* to_python(const Cylinder& cylinder) {
    auto wrapped = new_<AnalyticImplicit<Cylinder>>(cylinder);
    return to_python(wrapped);
}
/*
 * Load the Python shared library, point the interpreter at an active
 * virtualenv (if VIRTUAL_ENV is set), initialize the interpreter and
 * threading, then release the GIL.  Idempotent: returns immediately if
 * the interpreter is already running (gtstate set).  Honors COCOTB_ATTACH
 * by sleeping so a debugger can attach.
 */
void embed_init_python(void)
{
    FENTER;

#ifndef PYTHON_SO_LIB
#error "Python version needs passing in with -DPYTHON_SO_VERSION=libpython<ver>.so"
#else
#define PY_SO_LIB xstr(PYTHON_SO_LIB)
#endif

    // Don't initialise python if already running
    if (gtstate)
        return;

    void * ret = utils_dyn_open(PY_SO_LIB);
    if (!ret) {
        /* NOTE(review): failure is reported but not fatal — confirm intent */
        fprintf(stderr, "Failed to find python lib\n");
    }

    to_python();

    // reset Program Name (i.e. argv[0]) if we are in a virtual environment
    char *venv_path_home = getenv("VIRTUAL_ENV");
    if (venv_path_home) {
        char venv_path[strlen(venv_path_home)+64];
        strcpy(venv_path, venv_path_home);
        strcat(venv_path, "/bin/python"); // this is universal in any VIRTUAL_ENV the python interpreter
#if PY_MAJOR_VERSION >= 3
        static wchar_t venv_path_w[1024];
#if PY_MINOR_VERSION >= 5
        // Python3.5+ provides the locale decoder
        wcscpy(venv_path_w, Py_DecodeLocale(venv_path, NULL));
#else
        // for lesser python versions, we just hope the user specified his locales correctly
        setlocale (LC_ALL, "");
        mbstowcs(venv_path_w, venv_path, sizeof(venv_path_w));
#endif
        LOG_INFO("Using virtualenv at %ls.", venv_path_w);
        Py_SetProgramName(venv_path_w);
#else
        // Python2 case
        LOG_INFO("Using virtualenv at %s.", venv_path);
        Py_SetProgramName(venv_path);
#endif
    } else {
        LOG_INFO("Did not detect virtual environment. Using system-wide Python interpreter.");
    }

    Py_Initialize();                    /* Initialize the interpreter */
    PySys_SetArgvEx(1, argv, 0);
    PyEval_InitThreads();               /* Create (and acquire) the interpreter lock */

    /* Swap out and return current thread state and release the GIL */
    gtstate = PyEval_SaveThread();
    to_simulator();

    /* Before returning we check if the user wants pause the simulator thread
       such that they can attach */
    const char *pause = getenv("COCOTB_ATTACH");
    if (pause) {
        /* NOTE(review): errno is not reset to 0 before strtol, and
           "%lu" is used for a signed long below — confirm/fix upstream */
        long sleep_time = strtol(pause, NULL, 10);
        if (errno == ERANGE && (sleep_time == LONG_MAX || sleep_time == LONG_MIN)) {
            fprintf(stderr, "COCOTB_ATTACH only needs to be set to ~30 seconds");
            goto out;
        }
        if ((errno != 0 && sleep_time == 0) || (sleep_time <= 0)) {
            fprintf(stderr, "COCOTB_ATTACH must be set to an integer base 10 or omitted");
            goto out;
        }
        fprintf(stderr, "Waiting for %lu seconds - Attach to %d\n", sleep_time, getpid());
        sleep(sleep_time);
    }
out:
    FEXIT;
}
/*
 * Initialise the cocotb testbench inside the embedded interpreter.
 *
 * Imports the cocotb module, wires its loggpi handler/filter into the C
 * logging layer, publishes argv/argc/SIM_NAME/LANGUAGE into the module,
 * caches cocotb._sim_event in pEventFn, and finally calls
 * cocotb._initialise_testbench(dut).  Returns 0 on success, -1 on any
 * failure (via the goto cleanup path).  Idempotent: returns 0 at once if
 * pEventFn is already set.
 *
 * NOTE(review): simlog_obj, argv_list, arg_value and argc appear to be
 * leaked (no DECREF); one-shot init, so likely tolerated — confirm.
 */
int embed_sim_init(gpi_sim_info_t *info)
{
    FENTER

    int i;
    int ret = 0;

    /* Check that we are not already initialised */
    if (pEventFn)
        return ret;

    // Find the simulation root
    const char *dut = getenv("TOPLEVEL");

    if (dut != NULL) {
        if (!strcmp("", dut)) {
            /* Empty string passed in, treat as NULL */
            dut = NULL;
        } else {
            // Skip any library component of the toplevel
            char *dot = strchr(dut, '.');
            if (dot != NULL) {
                dut += (dot - dut + 1);
            }
        }
    }

    PyObject *cocotb_module, *cocotb_init, *cocotb_args, *cocotb_retval;
    PyObject *simlog_obj, *simlog_func;
    PyObject *argv_list, *argc, *arg_dict, *arg_value;

    cocotb_module = NULL;
    arg_dict = NULL;

    // Ensure that the current thread is ready to call the Python C API
    PyGILState_STATE gstate = PyGILState_Ensure();
    to_python();

    if (get_module_ref(COCOTB_MODULE, &cocotb_module))
        goto cleanup;

    // Obtain the loggpi logger object
    simlog_obj = PyObject_GetAttrString(cocotb_module, "loggpi");

    if (simlog_obj == NULL) {
        /* NOTE(review): reported but execution continues with NULL — the
           next GetAttrString would then crash; confirm this is unreachable */
        PyErr_Print();
        fprintf(stderr, "Failed to to get simlog object\n");
    }

    simlog_func = PyObject_GetAttrString(simlog_obj, "_printRecord");
    if (simlog_func == NULL) {
        PyErr_Print();
        fprintf(stderr, "Failed to get the _printRecord method");
        goto cleanup;
    }

    if (!PyCallable_Check(simlog_func)) {
        PyErr_Print();
        fprintf(stderr, "_printRecord is not callable");
        goto cleanup;
    }

    set_log_handler(simlog_func);

    Py_DECREF(simlog_func);

    simlog_func = PyObject_GetAttrString(simlog_obj, "_willLog");
    if (simlog_func == NULL) {
        PyErr_Print();
        fprintf(stderr, "Failed to get the _willLog method");
        goto cleanup;
    }

    if (!PyCallable_Check(simlog_func)) {
        PyErr_Print();
        fprintf(stderr, "_willLog is not callable");
        goto cleanup;
    }

    set_log_filter(simlog_func);

    /* Publish the simulator's argv/argc into the cocotb module dict */
    argv_list = PyList_New(0);
    for (i = 0; i < info->argc; i++) {
        arg_value = PyString_FromString(info->argv[i]);
        PyList_Append(argv_list, arg_value);
    }

    arg_dict = PyModule_GetDict(cocotb_module);  /* borrowed reference */
    PyDict_SetItemString(arg_dict, "argv", argv_list);

    argc = PyInt_FromLong(info->argc);
    PyDict_SetItemString(arg_dict, "argc", argc);

    if (!PyCallable_Check(simlog_func)) {
        PyErr_Print();
        fprintf(stderr, "_printRecord is not callable");
        goto cleanup;
    }

    LOG_INFO("Running on %s version %s", info->product, info->version);
    LOG_INFO("Python interpreter initialised and cocotb loaded!");

    // Now that logging has been set up ok we initialise the testbench
    if (-1 == PyObject_SetAttrString(cocotb_module, "SIM_NAME", PyString_FromString(info->product))) {
        PyErr_Print();
        fprintf(stderr, "Unable to set SIM_NAME");
        goto cleanup;
    }

    // Set language in use as an attribute to cocotb module, or None if not provided
    const char *lang = getenv("TOPLEVEL_LANG");
    PyObject* PyLang;
    if (lang)
        PyLang = PyString_FromString(lang);
    else
        PyLang = Py_None;
    if (-1 == PyObject_SetAttrString(cocotb_module, "LANGUAGE", PyLang)) {
        fprintf(stderr, "Unable to set LANGUAGE");
        goto cleanup;
    }

    // Hold onto a reference to our _fail_test function
    pEventFn = PyObject_GetAttrString(cocotb_module, "_sim_event");

    if (!PyCallable_Check(pEventFn)) {
        PyErr_Print();
        fprintf(stderr, "cocotb._sim_event is not callable");
        goto cleanup;
    }
    Py_INCREF(pEventFn);

    cocotb_init = PyObject_GetAttrString(cocotb_module, "_initialise_testbench");  // New reference

    if (cocotb_init == NULL || !PyCallable_Check(cocotb_init)) {
        if (PyErr_Occurred())
            PyErr_Print();
        fprintf(stderr, "Cannot find function \"%s\"\n", "_initialise_testbench");
        goto cleanup;
    }

    cocotb_args = PyTuple_New(1);
    if (dut == NULL)
        PyTuple_SetItem(cocotb_args, 0, Py_BuildValue(""));         // Note: This function "steals" a reference to o.
    else
        PyTuple_SetItem(cocotb_args, 0, PyString_FromString(dut));  // Note: This function "steals" a reference to o.
    cocotb_retval = PyObject_CallObject(cocotb_init, cocotb_args);

    if (cocotb_retval != NULL) {
        LOG_DEBUG("_initialise_testbench successful");
        Py_DECREF(cocotb_retval);
    } else {
        PyErr_Print();
        fprintf(stderr,"Cocotb initialisation failed - exiting\n");
        goto cleanup;
    }

    FEXIT
    goto ok;

cleanup:
    ret = -1;
ok:
    if (cocotb_module) {
        Py_DECREF(cocotb_module);
    }
    if (arg_dict) {
        Py_DECREF(arg_dict);
    }
    PyGILState_Release(gstate);
    to_simulator();

    return ret;
}
/**
 * @name    Callback Handling
 * @brief   Handle a callback coming from GPI
 * @ingroup python_c_api
 *
 * GILState before calling: Unknown
 *
 * GILState after calling: Unknown
 *
 * Makes one call to TAKE_GIL and one call to DROP_GIL
 *
 * Returns 0 on success or 1 on a failure.
 *
 * Handles a callback from the simulator, all of which call this function.
 *
 * We extract the associated context and find the Python function (usually
 * cocotb.scheduler.react) calling it with a reference to the trigger that
 * fired. The scheduler can then call next() on all the coroutines that
 * are waiting on that particular trigger.
 *
 * TODO:
 *  - Tidy up return values
 *  - Ensure cleanup correctly in exception cases
 *
 */
int handle_gpi_callback(void *user_data)
{
    int ret = 0;
    to_python();
    p_callback_data callback_data_p = (p_callback_data)user_data;

    /* Guard against stale/corrupted callback records */
    if (callback_data_p->id_value != COCOTB_ACTIVE_ID) {
        fprintf(stderr, "Userdata corrupted!\n");
        ret = 1;
        goto err;
    }
    callback_data_p->id_value = COCOTB_INACTIVE_ID;

    /* Cache the sim time */
    gpi_get_sim_time(&cache_time.high, &cache_time.low);

    PyGILState_STATE gstate;
    gstate = TAKE_GIL();

    // Python allowed

    if (!PyCallable_Check(callback_data_p->function)) {
        fprintf(stderr, "Callback fired but function isn't callable?!\n");
        ret = 1;
        goto out;
    }

    // Call the callback
    PyObject *pValue = PyObject_Call(callback_data_p->function,
                                     callback_data_p->args,
                                     callback_data_p->kwargs);

    // If the return value is NULL a Python exception has occurred
    // The best thing to do here is shutdown as any subsequent
    // calls will go back to python which is now in an unknown state
    if (pValue == NULL) {
        fprintf(stderr, "ERROR: called callback function returned NULL\n");
        if (PyErr_Occurred()) {
            fprintf(stderr, "Failed to execute callback due to python exception\n");
            PyErr_Print();
        } else {
            fprintf(stderr, "Failed to execute callback\n");
        }

        gpi_sim_end();
        ret = 0;
        goto out;
    }

    // Free up our mess
    Py_DECREF(pValue);

    // Callbacks may have been re-enabled
    if (callback_data_p->id_value == COCOTB_INACTIVE_ID) {
        Py_DECREF(callback_data_p->function);
        Py_DECREF(callback_data_p->args);

        // Free the callback data
        free(callback_data_p);
    }

out:
    DROP_GIL(gstate);

err:
    to_simulator();
    return ret;
}
/*
 * Convert a database value (dbtype_t) to a Python object.
 *
 * Scalars (Boolean/Int/Float/Uuid/String/ByteBuffer/Datetime) become the
 * corresponding Python scalar.  Containers become proxies when TP_PROXY
 * is set, otherwise they are materialized recursively; TP_PROXYCHLD
 * materializes only the top level and proxies the children.  Bonsai tree
 * nodes yield key, value, a (key, value) tuple, or a pointer proxy
 * depending on TP_NODEKEY/TP_NODEVAL.  Returns a new reference, None for
 * a zero value, or NULL with an exception set for unknown tags.
 */
PyObject *
to_python(pgctx_t *ctx, dbtype_t db, int flags)
{
    dbtag_t type;
    dbval_t *dv = NULL;
    PyObject *ob = NULL;
    PyObject *k, *v;
    epstr_t ea;
    epfloat_t fa;
    char *ma = NULL;
    uint32_t len = 0;
    _list_t *list;
    _obj_t *obj;
    struct tm tm;
    time_t time;
    long usec;
    int i;
    int64_t ival;
    tphelper_t h;

    if (db.all == 0)
        Py_RETURN_NONE;
    type = db.type;
    if (type == ByteBuffer || type == String) {
        /* Short string packed directly into the 64-bit value */
        ea.all = db.all;
        len = ea.len;
        ea.val[len] = 0;
        ma = (char*)ea.val;
    } else if (isPtr(type)) {
        /* Pointer types: dereference and pick up the real tag */
        dv = dbptr(ctx, db);
        type = dv->type;
        if (type == ByteBuffer || type == String) {
            len = dv->len;
            ma = (char*)dv->sval;
        }
    }
    switch(type) {
        case Boolean:
            ob = db.val ? Py_True : Py_False;
            Py_INCREF(ob);
            break;
        case Int:
            ival = db.val;
            /* Use PyLong only when the value doesn't fit a C long */
            if (ival < LONG_MIN || ival > LONG_MAX) {
                ob = PyLong_FromLongLong(ival);
            } else {
                ob = PyInt_FromLong((long)ival);
            }
            break;
        case Float:
            /* Doubles are stored shifted right 4 bits; undo that here */
            fa.ival = (int64_t)db.val << 4;
            ob = PyFloat_FromDouble(fa.fval);
            break;
#ifdef WANT_UUID_TYPE
        case Uuid:
            ob = PyObject_CallFunction(uuid_constructor, "Os#", Py_None, dv->uuval, 16);
            break;
#endif
        case ByteBuffer:
            ob = PyString_FromStringAndSize(ma, len);
            break;
        case String:
            ob = PyUnicode_FromStringAndSize(ma, len);
            break;
        case Datetime:
            /* Stored as microseconds since the epoch */
            time = db.val / 1000000LL;
            usec = db.val % 1000000LL;
#ifdef WIN32
            memcpy(&tm, gmtime(&time), sizeof(tm));
#else
            gmtime_r(&time, &tm);
#endif
            ob = PyDateTime_FromDateAndTime(
                    tm.tm_year+1900, tm.tm_mon, tm.tm_mday,
                    tm.tm_hour, tm.tm_min, tm.tm_sec, usec);
            break;
        case List:
            if (flags & TP_PROXY) {
                ob = PongoList_Proxy(ctx, db);
                pidcache_put(ctx, ob, db);
            } else {
                if (flags & TP_PROXYCHLD)
                    flags = (flags & ~TP_PROXYCHLD) | TP_PROXY;
                list = dbptr(ctx, dv->list);
                ob = PyList_New(list->len);
                for(i=0; i<list->len; i++) {
                    v = to_python(ctx, list->item[i], flags);
                    PyList_SET_ITEM(ob, i, v);
                    // Don't need to decref v since SET_ITEM steals the reference
                }
            }
            break;
        case Object:
            if (flags & TP_PROXY) {
                ob = PongoDict_Proxy(ctx, db);
                pidcache_put(ctx, ob, db);
            } else {
                if (flags & TP_PROXYCHLD)
                    flags = (flags & ~TP_PROXYCHLD) | TP_PROXY;
                obj = dbptr(ctx, dv->obj);
                ob = PyDict_New();
                for(i=0; i<obj->len; i++) {
                    k = to_python(ctx, obj->item[i].key, flags);
                    v = to_python(ctx, obj->item[i].value, flags);
                    PyDict_SetItem(ob, k, v);
                    Py_DECREF(k);
                    Py_DECREF(v);
                }
            }
            break;
        case Cache:
            // The cache is a collection
        case Collection:
        case MultiCollection:
            if (flags & TP_PROXY) {
                ob = PongoCollection_Proxy(ctx, db);
                pidcache_put(ctx, ob, db);
            } else {
                if (flags & TP_PROXYCHLD)
                    flags = (flags & ~TP_PROXYCHLD) | TP_PROXY;
                /* Flatten the Bonsai tree into a dict via the foreach helper */
                h.flags = flags | (TP_NODEKEY|TP_NODEVAL);
                h.type = Collection;
                h.ob = ob = PyDict_New();
                bonsai_foreach(ctx, dv->obj, to_python_helper, &h);
            }
            break;
        case _BonsaiNode:
        case _BonsaiMultiNode:
            k = v = NULL;
            if (flags & TP_NODEKEY) {
                k = to_python(ctx, dv->key, flags & ~(TP_NODEKEY|TP_NODEVAL));
                ob = k;
            }
            if (flags & TP_NODEVAL) {
                if (type == _BonsaiMultiNode) {
                    /* Multi-nodes carry several values: return them as a tuple */
                    v = PyTuple_New(dv->nvalue);
                    for(i=0; i<dv->nvalue; i++) {
                        ob = to_python(ctx, dv->values[i], flags & ~(TP_NODEKEY|TP_NODEVAL));
                        PyTuple_SET_ITEM(v, i, ob);
                        // Don't need to decref ob since SET_ITEM steals the reference
                    }
                } else {
                    v = to_python(ctx, dv->value, flags & ~(TP_NODEKEY|TP_NODEVAL));
                }
                ob = v;
            }
            if (k && v) {
                /* Both requested: pair them up */
                ob = PyTuple_Pack(2, k, v);
                Py_DECREF(k);
                Py_DECREF(v);
            }
            if (!k && !v) {
                ob = PongoPointer_Proxy(ctx, db);
            }
            break;
        case _InternalList:
        case _InternalObj:
            ob = PongoPointer_Proxy(ctx, db);
            break;
        default:
            PyErr_Format(PyExc_Exception, "Cannot handle dbtype %d", type);
    }
    return ob;
}
// Convert a compact blob to Python by wrapping it in its Python proxy
// type and delegating to that overload of to_python.
static inline PyObject* to_python(const compact_blob_t b) {
    auto wrapper = new_<compact_blob_py>(b);
    return to_python(wrapper);
}