/*
 * Initialize the python metric module subsystem.
 *
 * Creates this module's APR pool and the dynamic metric arrays, verifies
 * the configured python module path exists and is readable, boots the
 * Python interpreter (adding the path to sys.path), then imports every
 * python module in that directory which also has an enabled module
 * section in gmond.conf.  Each module's metric_init() is called with a
 * parameter dictionary built from its config; the metric descriptors it
 * returns (a single mapping, or a list of mappings) are appended to
 * metric_info / metric_mapping_info.
 *
 * p: parent APR pool supplied by the gmond core.
 * Returns 0 on success, -1 on a fatal setup error.  Failures inside an
 * individual python module are logged and skipped, not fatal.
 */
static int pyth_metric_init (apr_pool_t *p)
{
    DIR *dp;
    struct dirent *entry;
    char* modname;
    PyObject *pmod, *pinitfunc, *pobj, *pparamdict;
    py_metric_init_t minfo;
    Ganglia_25metric *gmi;
    mapped_info_t *mi;
    const char* path = python_module.module_params;
    cfg_t *module_cfg;

    /* Allocate a pool that will be used by this module */
    apr_pool_create(&pool, p);

    metric_info = apr_array_make(pool, 10, sizeof(Ganglia_25metric));
    metric_mapping_info = apr_array_make(pool, 10, sizeof(mapped_info_t));

    /* Verify path exists and can be read */
    if (!path) {
        err_msg("[PYTHON] Missing python module path.\n");
        return -1;
    }
    if (access(path, F_OK)) {
        /* 'path' does not exist */
        err_msg("[PYTHON] Can't open the python module path %s.\n", path);
        return -1;
    }
    if (access(path, R_OK)) {
        /* Don't have read access to 'path' */
        err_msg("[PYTHON] Can't read from the python module path %s.\n", path);
        return -1;
    }

    /* Init Python environment */

    /* Set up the python path to be able to load module from our module path */
    Py_Initialize();
    PyObject *sys_path = PySys_GetObject("path");    /* borrowed reference */
    PyObject *addpath = PyString_FromString(path);   /* new reference */
    if (sys_path && addpath)
        PyList_Append(sys_path, addpath);            /* append does NOT steal */
    Py_XDECREF(addpath);  /* BUGFIX: this reference was previously leaked */

    PyEval_InitThreads();
    gtstate = PyEval_SaveThread();

    /* Initialize each python module */
    if ((dp = opendir(path)) == NULL) {
        /* Error: Cannot open the directory - Shouldn't happen */
        err_msg("[PYTHON] Can't open the python module path %s.\n", path);
        return -1;
    }

    while ((entry = readdir(dp)) != NULL) {
        modname = is_python_module(entry->d_name);

        if (modname == NULL)
            continue;

        /* Find the specified module configuration in gmond.conf
           If this return NULL then either the module config
           doesn't exist or the module is disabled. */
        module_cfg = find_module_config(modname);
        if (!module_cfg)
            continue;

        /* Re-acquire the GIL while we work with python objects. */
        PyEval_RestoreThread(gtstate);

        pmod = PyImport_ImportModule(modname);
        if (!pmod) {
            /* Failed to import module. */
            err_msg("[PYTHON] Can't import the metric module [%s].\n", modname);
            if (PyErr_Occurred()) {
                PyErr_Print();
            }
            gtstate = PyEval_SaveThread();
            continue;
        }

        pinitfunc = PyObject_GetAttrString(pmod, "metric_init");
        if (!pinitfunc || !PyCallable_Check(pinitfunc)) {
            /* No callable metric_init function. */
            err_msg("[PYTHON] Can't find the metric_init function in the python module [%s].\n", modname);
            Py_XDECREF(pinitfunc);  /* BUGFIX: leaked when present but not callable */
            Py_DECREF(pmod);
            gtstate = PyEval_SaveThread();
            continue;
        }

        /* Build a parameter dictionary to pass to the module */
        pparamdict = build_params_dict(module_cfg);
        if (!pparamdict || !PyDict_Check(pparamdict)) {
            /* Couldn't build the parameter dictionary. */
            err_msg("[PYTHON] Can't build the parameters dictionary for [%s].\n", modname);
            Py_XDECREF(pparamdict);  /* BUGFIX: leaked when non-NULL but not a dict */
            Py_DECREF(pinitfunc);    /* BUGFIX: was leaked on this path */
            Py_DECREF(pmod);
            gtstate = PyEval_SaveThread();
            continue;
        }

        /* Now call the metric_init method of the python module.
           The "N" format code steals the pparamdict reference, so it must
           not be released here regardless of the outcome. */
        pobj = PyObject_CallFunction(pinitfunc, "(N)", pparamdict);

        if (!pobj) {
            /* failed calling metric_init */
            err_msg("[PYTHON] Can't call the metric_init function in the python module [%s].\n", modname);
            if (PyErr_Occurred()) {
                PyErr_Print();
            }
            Py_DECREF(pinitfunc);
            Py_DECREF(pmod);
            gtstate = PyEval_SaveThread();
            continue;
        }

        if (PyList_Check(pobj)) {
            /* metric_init returned a list of metric descriptors */
            int j;
            int size = PyList_Size(pobj);
            for (j = 0; j < size; j++) {
                PyObject* plobj = PyList_GetItem(pobj, j);  /* borrowed */
                if (PyMapping_Check(plobj)) {
                    fill_metric_info(plobj, &minfo, modname, pool);
                    gmi = (Ganglia_25metric*)apr_array_push(metric_info);
                    fill_gmi(gmi, &minfo);
                    mi = (mapped_info_t*)apr_array_push(metric_mapping_info);
                    mi->pmod = pmod;
                    mi->mod_name = apr_pstrdup(pool, modname);
                    mi->pcb = minfo.pcb;
                }
            }
        }
        else if (PyMapping_Check(pobj)) {
            /* metric_init returned a single metric descriptor */
            fill_metric_info(pobj, &minfo, modname, pool);
            gmi = (Ganglia_25metric*)apr_array_push(metric_info);
            fill_gmi(gmi, &minfo);
            mi = (mapped_info_t*)apr_array_push(metric_mapping_info);
            mi->pmod = pmod;
            mi->mod_name = apr_pstrdup(pool, modname);
            mi->pcb = minfo.pcb;
        }
        Py_DECREF(pobj);
        Py_DECREF(pinitfunc);
        /* NOTE: pmod is intentionally NOT released here; it is stored in
           metric_mapping_info and released by pyth_metric_cleanup. */
        gtstate = PyEval_SaveThread();
    }
    closedir(dp);

    apr_pool_cleanup_register(pool, NULL, pyth_metric_cleanup, apr_pool_cleanup_null);

    /* Replace the empty static metric definition array with
       the dynamic array that we just created
    */
    /*XXX Need to put this into a finalize MACRO. This is just pushing
      a NULL entry onto the array so that the looping logic can
      determine the end if the array. We should probably give back
      a ready APR array rather than a pointer to a Ganglia_25metric
      array. */
    gmi = apr_array_push(metric_info);
    memset (gmi, 0, sizeof(*gmi));
    mi = apr_array_push(metric_mapping_info);
    memset (mi, 0, sizeof(*mi));

    python_module.metrics_info = (Ganglia_25metric *)metric_info->elts;
    return 0;
}
/*
 * DiscoDB.__new__: construct a DiscoDB from an optional argument.
 *
 * arg may be omitted (empty db), a mapping (its items() are used), or an
 * iterable of (key, values) pairs.  'values' for a key may be None, a
 * single string, or an iterable of strings.  Keyword flags
 * disable_compression / unique_items map to ddb construction options.
 *
 * Returns a new DiscoDB instance, or NULL with an exception set.
 */
static PyObject *
DiscoDB_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    DiscoDB *self = (DiscoDB *)type->tp_alloc(type, 0);
    PyObject
        *arg = NULL,
        *item = NULL,
        *items = NULL,
        *iteritems = NULL,
        *itervalues = NULL,
        *vpack = NULL,
        *value = NULL,
        *values = NULL,
        *valueseq = NULL;
    struct ddb_cons *ddb_cons = NULL;
    struct ddb_entry *kentry = NULL, *ventry = NULL;
    uint64_t n, flags = 0;
    /* BUGFIX: the "I" format unit writes an unsigned int, not a uint64_t;
       parsing into uint64_t locals was only accidentally correct on
       little-endian machines. */
    unsigned int disable_compression = 0, unique_items = 0;
    static char *kwlist[] = {"arg", "disable_compression", "unique_items", NULL};

    /* BUGFIX: the original guarded the construction loop with
       'if (self != NULL)' but then unconditionally dereferenced
       self->obuffer below, crashing when tp_alloc failed. */
    if (self == NULL)
        return NULL;

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OII", kwlist,
                                     &arg, &disable_compression, &unique_items))
        goto Done;

    if (disable_compression)
        flags |= DDB_OPT_DISABLE_COMPRESSION;
    if (unique_items)
        flags |= DDB_OPT_UNIQUE_ITEMS;

    if (arg == NULL)                 /* null constructor */
        items = PyTuple_New(0);
    else if (PyMapping_Check(arg))   /* copy constructor */
        items = PyMapping_Items(arg);
    else                             /* iter constructor */
        Py_INCREF(items = arg);

    iteritems = PyObject_GetIter(items);
    if (iteritems == NULL)
        goto Done;

    ddb_cons = ddb_cons_alloc();
    if (ddb_cons == NULL)
        goto Done;

    while ((item = PyIter_Next(iteritems))) {
        kentry = ddb_entry_alloc(1);
        if (kentry == NULL)
            goto Done;

        /* NOTE(review): "s#" writes an int/Py_ssize_t length; assumes
           kentry->length has a compatible width — confirm ddb_entry. */
        if (!PyArg_ParseTuple(item, "s#O", &kentry->data, &kentry->length, &values))
            goto Done;

        Py_XINCREF(values);

        if (values == NULL)
            values = PyTuple_New(0);

        /* A bare string is treated as a single value, not a char sequence. */
        if (PyString_Check(values))
            valueseq = Py_BuildValue("(O)", values);
        else
            Py_XINCREF(valueseq = values);

        if (valueseq == NULL)
            goto Done;

        itervalues = PyObject_GetIter(valueseq);
        if (itervalues == NULL)
            goto Done;

        for (n = 0; (value = PyIter_Next(itervalues)); n++) {
            ventry = ddb_entry_alloc(1);
            if (ventry == NULL)
                goto Done;

            vpack = Py_BuildValue("(O)", value);
            if (vpack == NULL)
                goto Done;

            if (!PyArg_ParseTuple(vpack, "s#", &ventry->data, &ventry->length))
                goto Done;

            if (ddb_add(ddb_cons, kentry, ventry)) {
                PyErr_SetString(DiscoDBError, "Construction failed");
                goto Done;
            }

            Py_CLEAR(vpack);
            Py_CLEAR(value);
            DiscoDB_CLEAR(ventry);
        }

        /* A key with no values must still be recorded. */
        if (n == 0)
            if (ddb_add(ddb_cons, kentry, NULL)) {
                PyErr_SetString(DiscoDBError, "Construction failed");
                goto Done;
            }

        Py_CLEAR(itervalues);
        Py_CLEAR(item);
        Py_CLEAR(values);
        Py_CLEAR(valueseq);
        DiscoDB_CLEAR(kentry);
    }

    self->obuffer = NULL;
    self->cbuffer = ddb_finalize(ddb_cons, &n, flags);
    if (self->cbuffer == NULL) {
        PyErr_SetString(DiscoDBError, "Construction finalization failed");
        goto Done;
    }

    self->discodb = ddb_alloc();
    if (self->discodb == NULL)
        goto Done;

    if (ddb_loads(self->discodb, self->cbuffer, n))
        if (ddb_has_error(self->discodb))
            goto Done;

 Done:
    /* Single exit: release everything that may still be live; on error
       (detected via PyErr_Occurred) also drop the half-built instance. */
    ddb_cons_dealloc(ddb_cons);
    Py_CLEAR(item);
    Py_CLEAR(items);
    Py_CLEAR(iteritems);
    Py_CLEAR(itervalues);
    Py_CLEAR(vpack);
    Py_CLEAR(value);
    Py_CLEAR(values);
    Py_CLEAR(valueseq);
    DiscoDB_CLEAR(kentry);
    DiscoDB_CLEAR(ventry);

    if (PyErr_Occurred()) {
        Py_CLEAR(self);
        return NULL;
    }
    return (PyObject *)self;
}
/*
 * Convert an environment mapping into a Windows environment block: a
 * single string of "KEY=VALUE\0" entries terminated by an extra '\0'.
 *
 * environment: a mapping whose keys and values must all be str objects.
 * Returns a new PyString holding the block, or NULL with TypeError (bad
 * input) / MemoryError (allocation failure) set.
 */
static PyObject*
getenvironment(PyObject* environment)
{
    int i, envsize;
    PyObject* out = NULL;
    PyObject* keys = NULL;
    PyObject* values = NULL;
    char* p;

    /* convert environment dictionary to windows environment string */
    if (! PyMapping_Check(environment)) {
        PyErr_SetString(
            PyExc_TypeError, "environment must be dictionary or None");
        return NULL;
    }

    envsize = PyMapping_Length(environment);

    keys = PyMapping_Keys(environment);
    values = PyMapping_Values(environment);
    if (!keys || !values)
        goto error;

    /* Start with a generous buffer; grown below as needed. */
    out = PyString_FromStringAndSize(NULL, 2048);
    if (! out)
        goto error;

    p = PyString_AS_STRING(out);

    for (i = 0; i < envsize; i++) {
        int ksize, vsize, totalsize;
        PyObject* key = PyList_GET_ITEM(keys, i);
        PyObject* value = PyList_GET_ITEM(values, i);

        if (! PyString_Check(key) || ! PyString_Check(value)) {
            PyErr_SetString(PyExc_TypeError,
                            "environment can only contain strings");
            goto error;
        }

        ksize = PyString_GET_SIZE(key);
        vsize = PyString_GET_SIZE(value);
        /* bytes used so far + "key=value\0" + final terminating '\0' */
        totalsize = (p - PyString_AS_STRING(out)) + ksize + 1 +
                                                    vsize + 1 + 1;

        if (totalsize > PyString_GET_SIZE(out)) {
            int offset = p - PyString_AS_STRING(out);
            /* BUGFIX: check for resize failure; _PyString_Resize sets
               'out' to NULL on failure, so the old code dereferenced a
               freed/NULL buffer through p. */
            if (_PyString_Resize(&out, totalsize + 1024) < 0)
                goto error;
            p = PyString_AS_STRING(out) + offset;
        }

        memcpy(p, PyString_AS_STRING(key), ksize);
        p += ksize;
        *p++ = '=';
        memcpy(p, PyString_AS_STRING(value), vsize);
        p += vsize;
        *p++ = '\0';
    }

    /* add trailing null byte */
    *p++ = '\0';
    /* BUGFIX: this shrink can also fail; report instead of returning NULL
       through an unchecked 'out'. */
    if (_PyString_Resize(&out, p - PyString_AS_STRING(out)) < 0)
        goto error;

    /* PyObject_Print(out, stdout, 0); */

    Py_XDECREF(keys);
    Py_XDECREF(values);
    return out;

 error:
    Py_XDECREF(out);
    Py_XDECREF(keys);
    Py_XDECREF(values);
    return NULL;
}
/*
 * Create and return a new frame object for executing 'code' with the
 * given globals and (optional) locals, chained onto the calling thread's
 * current frame.  Handles three allocation strategies: reusing the
 * code object's "zombie" frame, popping a frame off the free list, or
 * allocating a fresh one.  Returns NULL with an exception set on failure.
 */
PyFrameObject *
PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals,
            PyObject *locals)
{
    PyFrameObject *back = tstate->frame;
    PyFrameObject *f;
    PyObject *builtins;
    Py_ssize_t i;

#ifdef Py_DEBUG
    if (code == NULL || globals == NULL || !PyDict_Check(globals) ||
        (locals != NULL && !PyMapping_Check(locals))) {
        PyErr_BadInternalCall();
        return NULL;
    }
#endif
    /* Resolve the builtins dict.  If the caller's frame shares our
       globals we can copy its builtins; otherwise look them up. */
    if (back == NULL || back->f_globals != globals) {
        builtins = PyDict_GetItem(globals, builtin_object); /* borrowed */
        if (builtins) {
            if (PyModule_Check(builtins)) {
                builtins = PyModule_GetDict(builtins);
                assert(!builtins || PyDict_Check(builtins));
            }
            else if (!PyDict_Check(builtins))
                builtins = NULL;
        }
        if (builtins == NULL) {
            /* No builtins!  Make up a minimal one
               Give them 'None', at least. */
            builtins = PyDict_New();
            /* NOTE(review): if PyDict_SetItemString fails here, the fresh
               builtins dict appears to be leaked — confirm upstream. */
            if (builtins == NULL ||
                PyDict_SetItemString(
                    builtins, "None", Py_None) < 0)
                return NULL;
        }
        else
            Py_INCREF(builtins);
    }
    else {
        /* If we share the globals, we share the builtins.
           Save a lookup and a call. */
        builtins = back->f_builtins;
        assert(builtins != NULL && PyDict_Check(builtins));
        Py_INCREF(builtins);
    }
    if (code->co_zombieframe != NULL) {
        /* Reuse the frame cached on the code object; its f_code and
           per-code fields are already valid. */
        f = code->co_zombieframe;
        code->co_zombieframe = NULL;
        _Py_NewReference((PyObject *)f);
        assert(f->f_code == code);
    }
    else {
        Py_ssize_t extras, ncells, nfrees;
        ncells = PyTuple_GET_SIZE(code->co_cellvars);
        nfrees = PyTuple_GET_SIZE(code->co_freevars);
        /* Variable-size tail: value stack + locals + cells + frees. */
        extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
        if (free_list == NULL) {
            f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
            if (f == NULL) {
                Py_DECREF(builtins);
                return NULL;
            }
        }
        else {
            /* Pop a recycled frame; grow it if this code needs more room. */
            assert(numfree > 0);
            --numfree;
            f = free_list;
            free_list = free_list->f_back;
            if (Py_SIZE(f) < extras) {
                f = PyObject_GC_Resize(PyFrameObject, f, extras);
                if (f == NULL) {
                    Py_DECREF(builtins);
                    return NULL;
                }
            }
            _Py_NewReference((PyObject *)f);
        }

        f->f_code = code;
        /* Value stack begins just past locals + cells + frees. */
        extras = code->co_nlocals + ncells + nfrees;
        f->f_valuestack = f->f_localsplus + extras;
        for (i=0; i<extras; i++)
            f->f_localsplus[i] = NULL;
        f->f_locals = NULL;
        f->f_trace = NULL;
        f->f_exc_type = f->f_exc_value = f->f_exc_traceback = NULL;
    }
    f->f_stacktop = f->f_valuestack;
    f->f_builtins = builtins;
    Py_XINCREF(back);
    f->f_back = back;
    Py_INCREF(code);
    Py_INCREF(globals);
    f->f_globals = globals;
    /* Most functions have CO_NEWLOCALS and CO_OPTIMIZED set. */
    if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) ==
        (CO_NEWLOCALS | CO_OPTIMIZED))
        ; /* f_locals = NULL; will be set by PyFrame_FastToLocals() */
    else if (code->co_flags & CO_NEWLOCALS) {
        /* Class bodies and similar: fresh locals dict, ignore argument. */
        locals = PyDict_New();
        if (locals == NULL) {
            Py_DECREF(f);
            return NULL;
        }
        f->f_locals = locals;
    }
    else {
        /* Module-level code: default locals to globals. */
        if (locals == NULL)
            locals = globals;
        Py_INCREF(locals);
        f->f_locals = locals;
    }
    f->f_tstate = tstate;

    f->f_lasti = -1;                     /* no instruction executed yet */
    f->f_lineno = code->co_firstlineno;
    f->f_iblock = 0;

    _PyObject_GC_TRACK(f);
    return f;
}
/*
 * Convert a Python object into a pongo dbtype_t stored in ctx.
 *
 * Dispatches on the object's type: None/bool/int/long/float/str/unicode/
 * datetime (and optionally uuid) map to the corresponding db constructors;
 * the Pongo proxy types resolve back to their wrapped dbtype; other
 * mappings and sequences are converted element-wise.  An object may also
 * supply its own conversion via a __topongo__() method.
 *
 * Returns the converted value, or DBNULL on failure (with a Python
 * exception set for unsupported types).
 */
dbtype_t
from_python(pgctx_t *ctx, PyObject *ob)
{
    /* BUGFIX: db was uninitialized; the unicode branch could fall
       through on encoding failure and return garbage. */
    dbtype_t db = DBNULL;
    char *buf;
    Py_ssize_t length;
    PyObject *items;
    struct tm tm;
    long usec;

    /* Give the object a chance to convert itself first. */
    if (PyObject_HasAttrString(ob, "__topongo__")) {
        /* NOTE(review): the new reference returned here replaces the
           borrowed 'ob' and is never released — presumably a small,
           accepted leak; confirm with the callers. */
        ob = PyObject_CallMethod(ob, "__topongo__", NULL);
        if (PyErr_Occurred())
            return DBNULL;
    }
    if (ob == Py_None) {
        db = DBNULL;
    } else if (ob == pongo_id) {
        db = dbuuid_new(ctx, NULL);
    } else if (ob == pongo_utcnow) {
        db = dbtime_now(ctx);
    } else if (PyBool_Check(ob)) {
        db = dbboolean_new(ctx, ob == Py_True);
    } else if (PyInt_Check(ob)) {
        db = dbint_new(ctx, PyInt_AsLong(ob));
    } else if (PyLong_Check(ob)) {
        db = dbint_new(ctx, PyLong_AsLongLong(ob));
    } else if (PyFloat_Check(ob)) {
        db = dbfloat_new(ctx, PyFloat_AsDouble(ob));
    } else if (PyString_Check(ob)) {
        PyString_AsStringAndSize(ob, &buf, &length);
        // FIXME:
        //db = dbbuffer_new(ctx, buf, length);
        db = dbstring_new(ctx, buf, length);
    } else if (PyUnicode_Check(ob)) {
        /* Encode to UTF-8; on failure db stays DBNULL. */
        ob = PyUnicode_AsUTF8String(ob);
        if (ob) {
            PyString_AsStringAndSize(ob, &buf, &length);
            db = dbstring_new(ctx, buf, length);
            Py_DECREF(ob);
        }
    } else if (PyDateTime_Check(ob)) {
        memset(&tm, 0, sizeof(tm));
        tm.tm_year = PyDateTime_GET_YEAR(ob);
        tm.tm_mon = PyDateTime_GET_MONTH(ob);
        tm.tm_mday = PyDateTime_GET_DAY(ob);
        tm.tm_hour = PyDateTime_DATE_GET_HOUR(ob);
        tm.tm_min = PyDateTime_DATE_GET_MINUTE(ob);
        tm.tm_sec = PyDateTime_DATE_GET_SECOND(ob);
        usec = PyDateTime_DATE_GET_MICROSECOND(ob);
        tm.tm_year -= 1900;   /* struct tm counts years from 1900 */
        db = dbtime_newtm(ctx, &tm, usec);
#ifdef WANT_UUID_TYPE
    } else if (PyObject_TypeCheck(ob, uuid_class)) {
        ob = PyObject_CallMethod(ob, "get_bytes", NULL);
        if (ob) {            /* BUGFIX: guard against call failure */
            PyString_AsStringAndSize(ob, &buf, &length);
            db = dbuuid_new(ctx, (uint8_t*)buf);
            Py_DECREF(ob);   /* BUGFIX: release the temporary bytes object */
        }
#endif
    } else if (Py_TYPE(ob) == &PongoList_Type) {
        // Resolve proxy types back to their original dbtype
        PongoList *p = (PongoList*)ob;
        db = p->dbptr;
    } else if (Py_TYPE(ob) == &PongoDict_Type) {
        // Resolve proxy types back to their original dbtype
        PongoDict *p = (PongoDict*)ob;
        db = p->dbptr;
    } else if (Py_TYPE(ob) == &PongoCollection_Type) {
        // Resolve proxy types back to their original dbtype
        PongoCollection *p = (PongoCollection*)ob;
        db = p->dbptr;
    } else if (PyMapping_Check(ob)) {
        length = PyMapping_Length(ob);
        items = PyMapping_Items(ob);
        if (items) {
            // mapping object implements "items"
            db = dbobject_new(ctx);
            dbobject_update(ctx, db, length, _py_mapping_cb, items, NOSYNC);
            Py_XDECREF(items);
        } else {
            // mapping object implements iterator protocol
            // don't have to decref the iterator object cuz it self-decrefs
            // upon StopIteration
            PyErr_Clear();
            items = PyObject_GetIter(ob);
            db = dbobject_new(ctx);
            dbobject_update(ctx, db, length, _py_itermapping_cb, items, NOSYNC);
        }
    } else if (PySequence_Check(ob)) {
        length = PySequence_Length(ob);
        db = dblist_new(ctx);
        dblist_extend(ctx, db, length, _py_sequence_cb, ob, NOSYNC);
    } else {
        // Unsupported type: raise TypeError with the offending type
        PyErr_SetObject(PyExc_TypeError, (PyObject*)Py_TYPE(ob));
        db = DBNULL;
    }
    return db;
}
/*
 * Convert a Python mapping (e.g. a dict) into a composite Datum whose
 * layout is given by 'desc'.  Each non-dropped attribute is looked up in
 * the mapping by column name; None produces a SQL NULL, a missing key is
 * an error.  Releases 'desc' before returning.
 */
static Datum
PLyMapping_ToComposite(PLyTypeInfo *info, TupleDesc desc, PyObject *mapping)
{
    HeapTuple tuple;
    Datum *values;
    bool *nulls;
    volatile int i;   /* volatile: live across PG_TRY/PG_CATCH */

    Assert(PyMapping_Check(mapping));

    /* is_rowtype == 2 means the per-column output functions have not
       been set up for this descriptor yet. */
    if (info->is_rowtype == 2)
        PLy_output_tuple_funcs(info, desc);
    Assert(info->is_rowtype == 1);

    /* Build tuple */
    values = palloc(sizeof(Datum) * desc->natts);
    nulls = palloc(sizeof(bool) * desc->natts);
    for (i = 0; i < desc->natts; ++i)
    {
        char       *key;
        PyObject   *volatile value;
        PLyObToDatum *att;

        /* dropped columns are emitted as NULLs */
        if (desc->attrs[i]->attisdropped)
        {
            values[i] = (Datum) 0;
            nulls[i] = true;
            continue;
        }

        key = NameStr(desc->attrs[i]->attname);
        value = NULL;
        att = &info->out.r.atts[i];
        PG_TRY();
        {
            value = PyMapping_GetItemString(mapping, key);
            if (value == Py_None)
            {
                values[i] = (Datum) NULL;
                nulls[i] = true;
            }
            else if (value)
            {
                values[i] = (att->func) (att, -1, value);
                nulls[i] = false;
            }
            else
                /* every non-dropped column must appear in the mapping */
                ereport(ERROR,
                        (errcode(ERRCODE_UNDEFINED_COLUMN),
                         errmsg("key \"%s\" not found in mapping", key),
                         errhint("To return null in a column, "
                                 "add the value None to the mapping with the key named after the column.")));
            Py_XDECREF(value);
            value = NULL;
        }
        PG_CATCH();
        {
            /* release the fetched item before re-raising the PG error */
            Py_XDECREF(value);
            PG_RE_THROW();
        }
        PG_END_TRY();
    }

    tuple = heap_form_tuple(desc, values, nulls);
    ReleaseTupleDesc(desc);
    pfree(values);
    pfree(nulls);

    return HeapTupleGetDatum(tuple);
}
/**
 * Static helper: decide whether pObject is a Python mapping and is
 * therefore eligible to be wrapped by this class.  A NULL pointer is
 * never a mapping.
 */
bool PyMappingSTL::Check( PyObject * pObject )
{
    if ( pObject == NULL )
        return false;
    return PyMapping_Check( pObject ) != 0;
}
/*
 * Convert a Python object to a composite type. First look up the type's
 * description, then route the Python object through the conversion function
 * for obtaining PostgreSQL tuples.
 *
 * arg     - conversion metadata for the target composite type
 * plrv    - the Python value returned by the procedure
 * isnull  - output: set true iff plrv is None
 * inarray - true when converting an element inside an array
 */
static Datum
PLyObject_ToComposite(PLyObToDatum *arg, PyObject *plrv,
                      bool *isnull, bool inarray)
{
    Datum rv;
    TupleDesc desc;

    /* None maps straight to SQL NULL */
    if (plrv == Py_None)
    {
        *isnull = true;
        return (Datum) 0;
    }
    *isnull = false;

    /*
     * The string conversion case doesn't require a tupdesc, nor per-field
     * conversion data, so just go for it if that's the case to use.
     */
    if (PyString_Check(plrv) || PyUnicode_Check(plrv))
        return PLyString_ToComposite(arg, plrv, inarray);

    /*
     * If we're dealing with a named composite type, we must look up the
     * tupdesc every time, to protect against possible changes to the type.
     * RECORD types can't change between calls; but we must still be willing
     * to set up the info the first time, if nobody did yet.
     */
    if (arg->typoid != RECORDOID)
    {
        desc = lookup_rowtype_tupdesc(arg->typoid, arg->typmod);
        /* We should have the descriptor of the type's typcache entry */
        Assert(desc == arg->u.tuple.typentry->tupDesc);
        /* Detect change of descriptor, update cache if needed */
        if (arg->u.tuple.tupdescseq != arg->u.tuple.typentry->tupDescSeqNo)
        {
            PLy_output_setup_tuple(arg, desc,
                                   PLy_current_execution_context()->curr_proc);
            arg->u.tuple.tupdescseq = arg->u.tuple.typentry->tupDescSeqNo;
        }
    }
    else
    {
        desc = arg->u.tuple.recdesc;
        if (desc == NULL)
        {
            desc = lookup_rowtype_tupdesc(arg->typoid, arg->typmod);
            arg->u.tuple.recdesc = desc;
        }
        else
        {
            /* Pin descriptor to match unpin below */
            PinTupleDesc(desc);
        }
    }

    /* Simple sanity check on our caching */
    Assert(desc->natts == arg->u.tuple.natts);

    /*
     * Convert, using the appropriate method depending on the type of the
     * supplied Python object.
     */
    if (PySequence_Check(plrv))
        /* composite type as sequence (tuple, list etc) */
        rv = PLySequence_ToComposite(arg, desc, plrv);
    else if (PyMapping_Check(plrv))
        /* composite type as mapping (currently only dict) */
        rv = PLyMapping_ToComposite(arg, desc, plrv);
    else
        /* returned as smth, must provide method __getattr__(name) */
        rv = PLyGenericObject_ToComposite(arg, desc, plrv, inarray);

    /* Balance the lookup/pin performed above. */
    ReleaseTupleDesc(desc);

    return rv;
}
/*
 * Convert a Python mapping into a composite Datum laid out per 'desc'.
 * Each non-dropped column is looked up in the mapping by name; a missing
 * key raises an error (None must be used for NULLs).  The formed tuple is
 * copied into a self-contained Datum; per-call memory is freed before
 * returning.
 */
static Datum
PLyMapping_ToComposite(PLyObToDatum *arg, TupleDesc desc, PyObject *mapping)
{
    Datum result;
    HeapTuple tuple;
    Datum *values;
    bool *nulls;
    volatile int i;   /* volatile: live across PG_TRY/PG_CATCH */

    Assert(PyMapping_Check(mapping));

    /* Build tuple */
    values = palloc(sizeof(Datum) * desc->natts);
    nulls = palloc(sizeof(bool) * desc->natts);
    for (i = 0; i < desc->natts; ++i)
    {
        char       *key;
        PyObject   *volatile value;
        PLyObToDatum *att;
        Form_pg_attribute attr = TupleDescAttr(desc, i);

        /* dropped columns are emitted as NULLs */
        if (attr->attisdropped)
        {
            values[i] = (Datum) 0;
            nulls[i] = true;
            continue;
        }

        key = NameStr(attr->attname);
        value = NULL;
        att = &arg->u.tuple.atts[i];
        PG_TRY();
        {
            value = PyMapping_GetItemString(mapping, key);
            if (!value)
                /* every non-dropped column must appear in the mapping */
                ereport(ERROR,
                        (errcode(ERRCODE_UNDEFINED_COLUMN),
                         errmsg("key \"%s\" not found in mapping", key),
                         errhint("To return null in a column, "
                                 "add the value None to the mapping with the key named after the column.")));
            /* the per-column conversion fills nulls[i] itself */
            values[i] = att->func(att, value, &nulls[i], false);
            Py_XDECREF(value);
            value = NULL;
        }
        PG_CATCH();
        {
            /* release the fetched item before re-raising the PG error */
            Py_XDECREF(value);
            PG_RE_THROW();
        }
        PG_END_TRY();
    }

    tuple = heap_form_tuple(desc, values, nulls);
    /* copy into a free-standing Datum so the tuple can be freed */
    result = heap_copy_tuple_as_datum(tuple, desc);
    heap_freetuple(tuple);

    pfree(values);
    pfree(nulls);

    return result;
}
/*
 * Context.__new__: allocate a Context bound to a Runtime, create its
 * JSContext, install the standard JS classes, and (optionally) attach a
 * Python "global" handler that must support item access.
 *
 * args: (Runtime, [global]).  Returns the new Context, or NULL with an
 * exception set.  Control flow uses goto error/success; note that BOTH
 * labels end the JS request — on the error path self is cleared first,
 * so the success-label check is then skipped.
 */
PyObject*
Context_new(PyTypeObject* type, PyObject* args, PyObject* kwargs)
{
    Context* self = NULL;
    Runtime* runtime = NULL;
    PyObject* global = NULL;

    if(!PyArg_ParseTuple(
        args, "O!|O",
        RuntimeType, &runtime,
        &global
    )) goto error;

    /* The global handler only needs mapping-style item access. */
    if(global != NULL && !PyMapping_Check(global))
    {
        PyErr_SetString(PyExc_TypeError,
                            "Global handler must provide item access.");
        goto error;
    }

    self = (Context*) type->tp_alloc(type, 0);
    if(self == NULL) goto error;

    // Tracking what classes we've installed in
    // the context.
    self->classes = (PyDictObject*) PyDict_New();
    if(self->classes == NULL) goto error;

    self->objects = (PySetObject*) PySet_New(NULL);
    if(self->objects == NULL) goto error;

    self->cx = JS_NewContext(runtime->rt, 8192);
    if(self->cx == NULL)
    {
        PyErr_SetString(PyExc_RuntimeError, "Failed to create JSContext.");
        goto error;
    }

    JS_BeginRequest(self->cx);

    /*
     * Notice that we don't add a ref to the Python context for
     * the copy stored on the JSContext*. I'm pretty sure this
     * would cause a cyclic dependancy that would prevent
     * garbage collection from happening on either side of the
     * bridge.
     *
     * To make sure that the context stays alive we'll add a
     * reference to the Context* anytime we wrap a Python
     * object for use in JS.
     *
     */
    JS_SetContextPrivate(self->cx, self);

    // Setup the root of the property lookup doodad.
    self->root = JS_NewObject(self->cx, &js_global_class, NULL, NULL);
    if(self->root == NULL)
    {
        PyErr_SetString(PyExc_RuntimeError, "Error creating root object.");
        goto error;
    }

    if(!JS_InitStandardClasses(self->cx, self->root))
    {
        PyErr_SetString(PyExc_RuntimeError, "Error initializing JS VM.");
        goto error;
    }

    // Don't setup the global handler until after the standard classes
    // have been initialized.
    // XXX: Does anyone know if finalize is called if new fails?
    if(global != NULL) Py_INCREF(global);
    self->global = global;

    // Setup counters for resource limits
    self->branch_count = 0;
    self->max_time = 0;
    self->start_time = 0;
    self->max_heap = 0;

    JS_SetBranchCallback(self->cx, branch_cb);
    JS_SetErrorReporter(self->cx, report_error_cb);

    Py_INCREF(runtime);
    self->rt = runtime;

    goto success;

error:
    /* End the request before dropping self; the dealloc is expected to
       release classes/objects/cx acquired above. */
    if(self != NULL && self->cx != NULL) JS_EndRequest(self->cx);
    Py_XDECREF(self);
    self = NULL;

success:
    /* self is NULL here when we arrived via the error path. */
    if(self != NULL && self->cx != NULL) JS_EndRequest(self->cx);
    return (PyObject*) self;
}
/*
 * dict.update()-style method: merge keys/values into the Kyoto Cabinet
 * database.  Accepts at most one positional argument — either a mapping
 * (merged via KyotoDB_update_with_mapping) or an iterable of 2-item
 * sequences — plus optional keyword arguments, which are merged last.
 * Keys/values are serialized by KyotoDB_dump (optionally pickled).
 * Returns None, or NULL with an exception set.
 */
static PyObject *
KyotoDB_update(KyotoDB *self, PyObject *args, PyObject *kwds)
{
    Py_ssize_t size = PyTuple_Size(args);

    if (size > 1) {
        APR str(PyString_FromFormat("update expected at most 1 arguments, got %zd", size));
        PyErr_SetObject(PyExc_TypeError, str.get());
        return NULL;
    }

    if (size == 1) {
        // NOTE(review): PyTuple_GetItem returns a *borrowed* reference;
        // this assumes the APR smart-pointer wrapper either INCREFs on
        // construction or tolerates borrowed refs — confirm its semantics.
        APR obj(PyTuple_GetItem(args, 0));
        if (PyMapping_Check(obj.get())) {
            if (!KyotoDB_update_with_mapping(self, obj.get()))
                return NULL;
        } else {
            APR iterator (PyObject_GetIter(obj.get()));
            APR item;
            if (iterator == NULL) {
                PyErr_SetString(PyExc_RuntimeError, "object is not iterable");
                return NULL;
            }
            Py_ssize_t i = 0;   // element index, used only for error messages
            while ((item = PyIter_Next(iterator.get())) != NULL) {
                // Mirror dict.update()'s errors for malformed elements.
                if (!PySequence_Check(item.get())) {
                    APR str(PyString_FromFormat("cannot convert dictionary update sequence element #%zd to a sequence", i));
                    PyErr_SetObject(PyExc_TypeError, str.get());
                    return NULL;
                }
                if (PySequence_Size(item.get()) != 2) {
                    APR str(PyString_FromFormat("dictionary update sequence element #%zd has length %zd; 2 is required", i, PySequence_Size(item.get())));
                    PyErr_SetObject(PyExc_TypeError, str.get());
                    return NULL;
                }
                APR key(PySequence_GetItem(item.get(), 0));
                APR value(PySequence_GetItem(item.get(), 1));
                bool ok;
                // Serialize both halves; KyotoDB_dump reports failure via ok
                // (an exception is presumably already set — confirm).
                std::string ckey = KyotoDB_dump(key, self->use_pickle, &ok);
                if (!ok)
                    return NULL;
                std::string cvalue = KyotoDB_dump(value, self->use_pickle, &ok);
                if (!ok)
                    return NULL;
                self->m_db->set(ckey, cvalue);
                i++;
            }
        }
    }

    // Keyword arguments are themselves a dict; merge them last so they
    // win over the positional argument, as dict.update() does.
    if (kwds) {
        if (!KyotoDB_update_with_mapping(self, kwds))
            return NULL;
    }

    Py_RETURN_NONE;
}