/*
 * mapping_to_dict - materialize any Python mapping as a real dict.
 *
 * Returns a NEW reference:
 *   - the mapping itself (incref'd) when it is already a dict;
 *   - a fresh dict built from the mapping's items() protocol;
 *   - the object's __dict__ as a fallback for class instances whose
 *     PyMapping_Items() call fails.
 * Returns NULL with an exception set on error.
 *
 * Fixes vs. previous revision: closed the unbalanced paren in the
 * TypeError message; item/key/value lookups and PyDict_SetItem are now
 * checked so a malformed items() result cannot pass NULL into the dict
 * API; loop uses Py_ssize_t and hoists the length out of the loop.
 */
PyObject *mapping_to_dict(PyObject* mapping)
{
    PyObject *items = NULL, *item = NULL, *new_dict = NULL;

    if (PyDict_Check(mapping)) {
        Py_INCREF(mapping);
        return mapping;
    }
    if (!PyMapping_Check(mapping)) {
        PyErr_SetString(PyExc_TypeError,
                        "Object must be a mapping (dictionary, class instance, etc.)");
        return NULL;
    }

    /* Any generic mapping - tested using class that defines its own
     * items method. */
    items = PyMapping_Items(mapping);
    if (items != NULL) {
        new_dict = PyDict_New();
        if (new_dict != NULL) {
            Py_ssize_t n = PySequence_Length(items);
            Py_ssize_t item_ind;

            for (item_ind = 0; item_ind < n; item_ind++) {
                PyObject *key, *value;

                item = PySequence_GetItem(items, item_ind);
                key = item ? PyTuple_GetItem(item, 0) : NULL;
                value = item ? PyTuple_GetItem(item, 1) : NULL;
                if (key == NULL || value == NULL ||
                    PyDict_SetItem(new_dict, key, value) < 0) {
                    /* Propagate the failure instead of inserting NULLs. */
                    Py_XDECREF(item);
                    Py_DECREF(new_dict);
                    new_dict = NULL;
                    break;
                }
                Py_DECREF(item);
            }
        }
        Py_DECREF(items);
        return new_dict;
    }
    PyErr_Clear();

    /* PyMapping_Items doesn't work for (old-style) class instances that
     * lack an "items" method; fall back to the instance __dict__. */
    new_dict = PyObject_GetAttrString(mapping, "__dict__");
    return new_dict;
}
/*
 * PLyMapping_ToJsonbValue
 *
 * Transform Python dict to JsonbValue.
 *
 * FIX: the list returned by PyMapping_Items() was released only on the
 * error (PG_CATCH) path; every successful call leaked one reference.
 * It is now released on both paths.
 */
static JsonbValue *
PLyMapping_ToJsonbValue(PyObject *obj, JsonbParseState **jsonb_state)
{
	Py_ssize_t	pcount;
	JsonbValue *out = NULL;

	/* We need it volatile, since we use it after longjmp */
	volatile PyObject *items_v = NULL;

	pcount = PyMapping_Size(obj);
	/* NOTE(review): assumed non-NULL for a mapping; if PyMapping_Items
	 * can fail here the decrefs below would crash — confirm upstream. */
	items_v = PyMapping_Items(obj);

	PG_TRY();
	{
		Py_ssize_t	i;
		PyObject   *items;

		items = (PyObject *) items_v;

		pushJsonbValue(jsonb_state, WJB_BEGIN_OBJECT, NULL);

		for (i = 0; i < pcount; i++)
		{
			JsonbValue	jbvKey;
			PyObject   *item = PyList_GetItem(items, i);
			PyObject   *key = PyTuple_GetItem(item, 0);
			PyObject   *value = PyTuple_GetItem(item, 1);

			/* Python dictionary can have None as key */
			if (key == Py_None)
			{
				jbvKey.type = jbvString;
				jbvKey.val.string.len = 0;
				jbvKey.val.string.val = "";
			}
			else
			{
				/* All others types of keys we serialize to string */
				PLyString_ToJsonbValue(key, &jbvKey);
			}

			(void) pushJsonbValue(jsonb_state, WJB_KEY, &jbvKey);
			(void) PLyObject_ToJsonbValue(value, jsonb_state, false);
		}

		out = pushJsonbValue(jsonb_state, WJB_END_OBJECT, NULL);
	}
	PG_CATCH();
	{
		Py_DECREF((PyObject *) items_v);
		PG_RE_THROW();
	}
	PG_END_TRY();

	/* Success path: drop our reference to the items list (was leaked). */
	Py_DECREF((PyObject *) items_v);

	return out;
}
/* Python entry point: render a lattice description back to GLPS text.
 *
 * Accepts either a dict (converted to a list of (key, value) tuples via
 * PyMapping_Items) or a list of tuples, builds a Config from it, and
 * returns the pretty-printed GLPS source as a Python string.
 * Returns NULL with a Python exception set on error; C++ exceptions are
 * translated by the CATCH() macro.
 */
PyObject* PyGLPSPrint(PyObject *, PyObject *args)
{
    try {
        PyObject *inp;
        if(!PyArg_ParseTuple(args, "O", &inp))
            return NULL;

        // 'list' (a PyRef<>) owns the temporary items list so it is
        // released when this scope unwinds.
        PyRef<> list;
        if(PyDict_Check(inp)) {
            list.reset(PyMapping_Items(inp));
            inp = list.py();
        }
        if(!PyList_Check(inp))
            return PyErr_Format(PyExc_ValueError, "argument must be dict or list of tuples");

        Config conf;
        List2Config(conf, inp);
        std::ostringstream strm;
        GLPSPrint(strm, conf);
        return PyString_FromString(strm.str().c_str());
    }CATCH()
}
/* PongoDict.update(iter, sync=ctx default)
 *
 * Bulk-update the backing database object from a Python mapping.  The
 * whole operation runs under the context lock.  Two strategies:
 *   1. the mapping implements items() -> feed the item list through
 *      _py_mapping_cb;
 *   2. items() is unavailable -> fall back to the iterator protocol and
 *      _py_itermapping_cb.
 * Returns None on success, NULL (with the dbobject_update error state)
 * otherwise.  Non-mapping arguments silently return NULL.
 */
static PyObject *
PongoDict_update(PongoDict *self, PyObject *args, PyObject *kwargs)
{
    PyObject *iter, *items;
    PyObject *ret = NULL;
    int length;
    int sync = self->ctx->sync;          /* default sync policy from the context */
    char *kwlist[] = {"iter", "sync", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|i:update", kwlist,
                &iter, &sync))
        return NULL;

    dblock(self->ctx);
    if (PyMapping_Check(iter)) {
        length = PyMapping_Length(iter);
        items = PyMapping_Items(iter);
        if (items) {
            // mapping object implements "items"
            if (dbobject_update(SELF_CTX_AND_DBPTR, length, _py_mapping_cb, items, sync) == 0)
                ret = Py_None;
            Py_DECREF(items);
        } else {
            // mapping object implements iterator protocol
            // don't have to decref the iterator because it self-decrefs
            // upon StopIteration
            PyErr_Clear();
            /* NOTE(review): PyObject_GetIter may return NULL here;
             * presumably dbobject_update tolerates a NULL iterator —
             * confirm before relying on it. */
            items = PyObject_GetIter(iter);
            if (dbobject_update(SELF_CTX_AND_DBPTR, length, _py_itermapping_cb, items, sync) == 0)
                ret = Py_None;
        }
    }
    dbunlock(self->ctx);
    /* ret is either NULL or the borrowed Py_None; incref before return. */
    Py_XINCREF(ret);
    return ret;
}
/* items() method of the proxy: forward to the wrapped mapping through
 * the abstract mapping protocol.  Returns a new reference, or NULL with
 * an exception set. */
static PyObject *
proxy_items(proxyobject *pp)
{
    PyObject *result;

    result = PyMapping_Items(pp->dict);
    return result;
}
Datum plpython_to_hstore(PG_FUNCTION_ARGS) { PyObject *dict; volatile PyObject *items_v = NULL; int32 pcount; HStore *out; dict = (PyObject *) PG_GETARG_POINTER(0); if (!PyMapping_Check(dict)) ereport(ERROR, (errcode(ERRCODE_WRONG_OBJECT_TYPE), errmsg("not a Python mapping"))); pcount = PyMapping_Size(dict); items_v = PyMapping_Items(dict); PG_TRY(); { int32 buflen; int32 i; Pairs *pairs; PyObject *items = (PyObject *) items_v; pairs = palloc(pcount * sizeof(*pairs)); for (i = 0; i < pcount; i++) { PyObject *tuple; PyObject *key; PyObject *value; tuple = PyList_GetItem(items, i); key = PyTuple_GetItem(tuple, 0); value = PyTuple_GetItem(tuple, 1); pairs[i].key = PLyObject_AsString(key); pairs[i].keylen = hstoreCheckKeyLen(strlen(pairs[i].key)); pairs[i].needfree = true; if (value == Py_None) { pairs[i].val = NULL; pairs[i].vallen = 0; pairs[i].isnull = true; } else { pairs[i].val = PLyObject_AsString(value); pairs[i].vallen = hstoreCheckValLen(strlen(pairs[i].val)); pairs[i].isnull = false; } } Py_DECREF(items_v); pcount = hstoreUniquePairs(pairs, pcount, &buflen); out = hstorePairs(pairs, pcount, buflen); } PG_CATCH(); { Py_DECREF(items_v); PG_RE_THROW(); } PG_END_TRY(); PG_RETURN_POINTER(out); }
it is in 0.3 of the list\n\ \n\ "; static PyObject *clistfns_contents(PyObject *self, PyObject *args) { int i; PyObject *items, *counts, *percentages; PyObject *countitems, *countitem; PyObject *key, *count, *perc; long c; double total; if(!PyArg_ParseTuple(args, "O", &items)) return NULL; if(!PySequence_Check(items)) { PyErr_SetString(PyExc_TypeError, "expected mapping type"); return NULL; } if((total = PySequence_Length(items)) == -1) { PyErr_SetString(PyExc_ValueError, "I couldn't get length of item."); return NULL; } counts = clistfns_count(self, args); if(!counts || PyErr_Occurred()) return NULL; if(!(percentages = PyDict_New())) { Py_DECREF(counts); return NULL; } /* Loop through every element in counts, calculating the probabilities. */ if(!(countitems = PyMapping_Items(counts))) { Py_DECREF(counts); Py_DECREF(percentages); return NULL; } /* Go through the loop, counting how often each item appears. */ i = 0; while(1) { if(!(countitem = PyList_GetItem(countitems, i))) { PyErr_Clear(); /* clear the exception set by PyList_GetItem */ break; /* no more numbers */ } key = PyTuple_GetItem(countitem, 0); count = PyTuple_GetItem(countitem, 1); c = PyInt_AsLong(count); perc = PyFloat_FromDouble((double)c / total); PyDict_SetItem(percentages, key, perc); Py_DECREF(perc); if(PyErr_Occurred()) /* PyDict_SetItem failed */ break; i++; } if(PyErr_Occurred()) { Py_DECREF(percentages); percentages = NULL; } Py_DECREF(countitems); Py_DECREF(counts); return percentages; }
/* DiscoDB(arg=None, disable_compression=0, unique_items=0)
 *
 * Allocate and populate a new DiscoDB.  'arg' may be omitted (empty db),
 * a mapping (its items() are used), or any iterable of (key, values)
 * tuples.  Each values entry may be a single string or an iterable of
 * strings; a key with zero values is stored with a NULL value entry.
 * All cleanup funnels through the 'Done' label; on any Python error the
 * half-built object is dropped and NULL returned.
 *
 * NOTE(review): disable_compression/unique_items are uint64_t but parsed
 * with "II" (unsigned int) — relies on the zero-initialization and
 * little-endian layout for the upper bytes; confirm before porting.
 * NOTE(review): if tp_alloc fails, self is NULL yet 'self->obuffer' below
 * is still executed — looks like a latent NULL dereference; confirm.
 */
static PyObject *
DiscoDB_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    DiscoDB *self = (DiscoDB *)type->tp_alloc(type, 0);
    PyObject
        *arg = NULL,
        *item = NULL,
        *items = NULL,
        *iteritems = NULL,
        *itervalues = NULL,
        *vpack = NULL,
        *value = NULL,
        *values = NULL,
        *valueseq = NULL;
    struct ddb_cons *ddb_cons = NULL;
    struct ddb_entry *kentry = NULL,
                     *ventry = NULL;
    uint64_t n, flags = 0,
        disable_compression = 0,
        unique_items = 0;
    static char *kwlist[] = {"arg", "disable_compression", "unique_items", NULL};

    if (self != NULL) {
        if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OII", kwlist,
                                         &arg,
                                         &disable_compression,
                                         &unique_items))
            goto Done;

        if (disable_compression)
            flags |= DDB_OPT_DISABLE_COMPRESSION;
        if (unique_items)
            flags |= DDB_OPT_UNIQUE_ITEMS;

        if (arg == NULL)                    /* null constructor */
            items = PyTuple_New(0);
        else if (PyMapping_Check(arg))      /* copy constructor */
            items = PyMapping_Items(arg);
        else                                /* iter constructor */
            Py_INCREF(items = arg);

        iteritems = PyObject_GetIter(items);
        if (iteritems == NULL)
            goto Done;

        ddb_cons = ddb_cons_alloc();
        if (ddb_cons == NULL)
            goto Done;

        /* One (key, values) pair per item from the outer iterator. */
        while ((item = PyIter_Next(iteritems))) {
            kentry = ddb_entry_alloc(1);
            if (kentry == NULL)
                goto Done;

            if (!PyArg_ParseTuple(item, "s#O", &kentry->data, &kentry->length, &values))
                goto Done;

            Py_XINCREF(values);

            if (values == NULL)
                values = PyTuple_New(0);

            /* A bare string counts as a single value, not an iterable of
             * characters: wrap it in a 1-tuple. */
            if (PyString_Check(values))
                valueseq = Py_BuildValue("(O)", values);
            else
                Py_XINCREF(valueseq = values);

            if (valueseq == NULL)
                goto Done;

            itervalues = PyObject_GetIter(valueseq);
            if (itervalues == NULL)
                goto Done;

            for (n = 0; (value = PyIter_Next(itervalues)); n++) {
                ventry = ddb_entry_alloc(1);
                if (ventry == NULL)
                    goto Done;

                /* Repack so "s#" can extract the byte pointer/length. */
                vpack = Py_BuildValue("(O)", value);
                if (vpack == NULL)
                    goto Done;

                if (!PyArg_ParseTuple(vpack, "s#", &ventry->data, &ventry->length))
                    goto Done;

                if (ddb_add(ddb_cons, kentry, ventry)) {
                    PyErr_SetString(DiscoDBError, "Construction failed");
                    goto Done;
                }

                Py_CLEAR(vpack);
                Py_CLEAR(value);
                DiscoDB_CLEAR(ventry);
            }

            /* Key with no values at all: record it with a NULL entry. */
            if (n == 0)
                if (ddb_add(ddb_cons, kentry, NULL)) {
                    PyErr_SetString(DiscoDBError, "Construction failed");
                    goto Done;
                }

            Py_CLEAR(itervalues);
            Py_CLEAR(item);
            Py_CLEAR(values);
            Py_CLEAR(valueseq);
            DiscoDB_CLEAR(kentry);
        }
    }

    self->obuffer = NULL;
    self->cbuffer = ddb_finalize(ddb_cons, &n, flags);
    if (self->cbuffer == NULL) {
        PyErr_SetString(DiscoDBError, "Construction finalization failed");
        goto Done;
    }

    self->discodb = ddb_alloc();
    if (self->discodb == NULL)
        goto Done;

    if (ddb_loads(self->discodb, self->cbuffer, n))
        if (ddb_has_error(self->discodb))
            goto Done;

 Done:
    /* Unconditional cleanup; Py_CLEAR/DiscoDB_CLEAR are NULL-safe. */
    ddb_cons_dealloc(ddb_cons);
    Py_CLEAR(item);
    Py_CLEAR(items);
    Py_CLEAR(iteritems);
    Py_CLEAR(itervalues);
    Py_CLEAR(vpack);
    Py_CLEAR(value);
    Py_CLEAR(values);
    Py_CLEAR(valueseq);
    DiscoDB_CLEAR(kentry);
    DiscoDB_CLEAR(ventry);

    if (PyErr_Occurred()) {
        Py_CLEAR(self);
        return NULL;
    }
    return (PyObject *)self;
}
/* Glue shim exposing PyMapping_Items to callers that cannot use the
 * macro/inline form directly.  Returns a new reference or NULL. */
PyObject *glue_PyMapping_Items(PyObject *o)
{
    PyObject *items;

    items = PyMapping_Items(o);
    return items;
}
/*
 * nw_align_wrapper - Python binding for Needleman-Wunsch global alignment.
 *
 * nw_align(seq1, seq2, matrix=None, match=1, mismatch=-2, gap_open=-4,
 *          gap_extend=-1, no_start_gap_penalty=0, no_end_gap_penalty=0,
 *          no_gaps_in_a=0, no_gaps_in_b=0, no_mismatches=0,
 *          case_sensitive=0)
 *
 * 'matrix' is an optional dict {( 'a', 'b' ): score} of per-pair
 * substitution overrides.  Returns (aligned_a, aligned_b, score) or
 * NULL with an exception set.
 *
 * Fixes vs. previous revision:
 *  - the seq2 character handed to scoring_add_mutation was taken from
 *    char_a in the case-sensitive branch (copy-paste bug), so custom
 *    matrix entries scored the wrong pair;
 *  - the items list from PyMapping_Items was leaked on the success path.
 */
static PyObject *
nw_align_wrapper(PyObject *self, PyObject *args, PyObject *kw)
{
  const char *seq1, *seq2;

  // Default scoring
  int match = 1;
  int mismatch = -2;
  int gap_open = -4;
  int gap_extend = -1;

  // Don't penalise gaps at the start
  // ACGATTT
  // ----TTT would score +3 (when match=+1)
  int no_start_gap_penalty = 0;

  // ..or gaps at the end e.g.
  // ACGATTT
  // ACGA--- would score +4 (when match=+1)
  int no_end_gap_penalty = 0;

  int no_gaps_in_a = 0, no_gaps_in_b = 0;
  int no_mismatches = 0;

  // Compare character case-sensitively (usually set to 0 for DNA etc)
  int case_sensitive = 0;

  PyObject * matrix = NULL;

  static char *kwlist[] = {"seq1","seq2", "matrix", "match", "mismatch",
                           "gap_open","gap_extend",
                           "no_start_gap_penalty", "no_end_gap_penalty",
                           "no_gaps_in_a", "no_gaps_in_b",
                           "no_mismatches", "case_sensitive", NULL};

  PyObject *res = NULL;

  if(!PyArg_ParseTupleAndKeywords(args, kw, "ss|Oiiiiiiiiii", kwlist,
                                  &seq1, &seq2, &matrix,
                                  &match, &mismatch, &gap_open, &gap_extend,
                                  &no_start_gap_penalty, &no_end_gap_penalty,
                                  &no_gaps_in_a, &no_gaps_in_b,
                                  &no_mismatches, &case_sensitive))
    return NULL;

  alignment_t *result = alignment_create(256); // Variables to store alignment result
  nw_aligner_t *nw = needleman_wunsch_new();
  scoring_t scoring;

  scoring_init(&scoring, match, mismatch, gap_open, gap_extend,
               no_start_gap_penalty, no_end_gap_penalty,
               no_gaps_in_a, no_gaps_in_b, no_mismatches, case_sensitive);

  // Add some special cases
  // x -> y means x in seq1 changing to y in seq2
  if(matrix != NULL)
  {
    PyObject * mapping = PyMapping_Items(matrix);
    if(mapping == NULL)
      goto error;

    int n = PySequence_Size(mapping);
    PyObject *item;
    int value;
    PyObject *key;
    char * char_a;
    char * char_b;
    int i;

    for(i = 0; i < n; i++)
    {
      item = PySequence_GetItem(mapping, i);
      if(item == NULL || !PyTuple_Check(item))
      {
        Py_XDECREF(item);
        Py_DECREF(mapping);
        goto error;
      }
      if(!PyArg_ParseTuple(item, "Oi", &key, &value))
      {
        PyErr_SetString(PyExc_RuntimeError,
                        "Values of matrix dict should be integers");
        Py_XDECREF(item);
        Py_DECREF(mapping);
        goto error;
      }
      if(!PyTuple_Check(key))
      {
        PyErr_SetString(PyExc_RuntimeError,
                        "Keys of matrix dict should be tuples");
        Py_XDECREF(item);
        Py_DECREF(mapping);
        goto error;
      }
      if(!PyArg_ParseTuple(key, "ss", &char_a, &char_b))
      {
        PyErr_SetString(PyExc_RuntimeError,
                        "Keys of matrix dict should be tuples with 2 characters as elements.");
        Py_XDECREF(item);
        Py_DECREF(mapping);
        goto error;
      }
      if(strlen(char_a) != 1 || strlen(char_b) != 1)
      {
        PyErr_SetString(PyExc_RuntimeError, "Character length should be 1");
        Py_XDECREF(item);
        Py_DECREF(mapping);
        goto error;
      }

      /* BUG FIX: the second character must come from char_b in both
       * branches (was *char_a when case_sensitive). */
      scoring_add_mutation(&scoring,
                           case_sensitive ? *char_a : tolower(*char_a),
                           case_sensitive ? *char_b : tolower(*char_b),
                           value); // a -> c give substitution score -2

      Py_DECREF(item);
    }

    /* FIX: release the items list on the success path too (was leaked). */
    Py_DECREF(mapping);
  }

  // We could also prohibit the aligning of characters not given as special cases
  // scoring.use_match_mismatch = 0;

  needleman_wunsch_align(seq1, seq2, &scoring, nw, result);

  res = Py_BuildValue("ssi", result->result_a, result->result_b, result->score);

error:
  // Free memory for storing alignment results
  needleman_wunsch_free(nw);
  alignment_free(result);

  return res;
}
/*
 * from_python - convert an arbitrary Python object into a pongo dbtype_t.
 *
 * Dispatches on the concrete Python type: None/bool/int/long/float/str/
 * unicode/datetime/uuid map to the corresponding db scalar constructors;
 * Pongo proxy objects resolve back to their underlying dbptr; generic
 * mappings and sequences are converted element-wise.  Objects exposing a
 * __topongo__ method are converted via that hook first.
 *
 * Returns DBNULL with a Python exception set on failure.
 *
 * FIX: 'db' is now initialized to DBNULL — previously the PyUnicode
 * branch returned an uninitialized value when PyUnicode_AsUTF8String()
 * failed, and the uuid branch dereferenced (and leaked) the result of
 * PyObject_CallMethod without a NULL check.
 */
dbtype_t
from_python(pgctx_t *ctx, PyObject *ob)
{
    dbtype_t db = DBNULL;
    char *buf;
    Py_ssize_t length;
    PyObject *items;
    struct tm tm;
    long usec;
    //int i;

    if (PyObject_HasAttrString(ob, "__topongo__")) {
        /* NOTE(review): the converted object is never decref'd —
         * presumably a small leak; confirm before restructuring. */
        ob = PyObject_CallMethod(ob, "__topongo__", NULL);
        if (PyErr_Occurred())
            return DBNULL;
    }
    if (ob == Py_None) {
        db = DBNULL;
    } else if (ob == pongo_id) {
        db = dbuuid_new(ctx, NULL);
    } else if (ob == pongo_utcnow) {
        db = dbtime_now(ctx);
    } else if (PyBool_Check(ob)) {
        db = dbboolean_new(ctx, ob == Py_True);
    } else if (PyInt_Check(ob)) {
        db = dbint_new(ctx, PyInt_AsLong(ob));
    } else if (PyLong_Check(ob)) {
        db = dbint_new(ctx, PyLong_AsLongLong(ob));
    } else if (PyFloat_Check(ob)) {
        db = dbfloat_new(ctx, PyFloat_AsDouble(ob));
    } else if (PyString_Check(ob)) {
        PyString_AsStringAndSize(ob, &buf, &length);
        // FIXME:
        //db = dbbuffer_new(ctx, buf, length);
        db = dbstring_new(ctx, buf, length);
    } else if (PyUnicode_Check(ob)) {
        /* Encode to UTF-8; on failure db stays DBNULL and the Python
         * error is left set for the caller. */
        ob = PyUnicode_AsUTF8String(ob);
        if (ob) {
            PyString_AsStringAndSize(ob, &buf, &length);
            db = dbstring_new(ctx, buf, length);
            Py_DECREF(ob);
        }
    } else if (PyDateTime_Check(ob)) {
        memset(&tm, 0, sizeof(tm));
        tm.tm_year = PyDateTime_GET_YEAR(ob);
        tm.tm_mon = PyDateTime_GET_MONTH(ob);
        tm.tm_mday = PyDateTime_GET_DAY(ob);
        tm.tm_hour = PyDateTime_DATE_GET_HOUR(ob);
        tm.tm_min = PyDateTime_DATE_GET_MINUTE(ob);
        tm.tm_sec = PyDateTime_DATE_GET_SECOND(ob);
        usec = PyDateTime_DATE_GET_MICROSECOND(ob);
        /* struct tm counts years from 1900 */
        tm.tm_year -= 1900;
        db = dbtime_newtm(ctx, &tm, usec);
#ifdef WANT_UUID_TYPE
    } else if (PyObject_TypeCheck(ob, uuid_class)) {
        /* FIX: check the CallMethod result and release it afterwards. */
        ob = PyObject_CallMethod(ob, "get_bytes", NULL);
        if (ob) {
            PyString_AsStringAndSize(ob, &buf, &length);
            db = dbuuid_new(ctx, (uint8_t*)buf);
            Py_DECREF(ob);
        }
#endif
    } else if (Py_TYPE(ob) == &PongoList_Type) {
        // Resolve proxy types back to their original dbtype
        PongoList *p = (PongoList*)ob;
        db = p->dbptr;
    } else if (Py_TYPE(ob) == &PongoDict_Type) {
        // Resolve proxy types back to their original dbtype
        PongoDict *p = (PongoDict*)ob;
        db = p->dbptr;
    } else if (Py_TYPE(ob) == &PongoCollection_Type) {
        // Resolve proxy types back to their original dbtype
        PongoCollection *p = (PongoCollection*)ob;
        db = p->dbptr;
    } else if (PyMapping_Check(ob)) {
        length = PyMapping_Length(ob);
        items = PyMapping_Items(ob);
        if (items) {
            // mapping object implements "items"
            db = dbobject_new(ctx);
            dbobject_update(ctx, db, length, _py_mapping_cb, items, NOSYNC);
            Py_XDECREF(items);
        } else {
            // mapping object implements iterator protocol
            // don't have to decref the iterator object cuz it self-decrefs
            // upon StopIteration
            PyErr_Clear();
            items = PyObject_GetIter(ob);
            db = dbobject_new(ctx);
            dbobject_update(ctx, db, length, _py_itermapping_cb, items, NOSYNC);
        }
    } else if (PySequence_Check(ob)) {
        length = PySequence_Length(ob);
        db = dblist_new(ctx);
        dblist_extend(ctx, db, length, _py_sequence_cb, ob, NOSYNC);
    } else {
        // FIXME: Unknown object type
        PyErr_SetObject(PyExc_TypeError, (PyObject*)Py_TYPE(ob));
        db = DBNULL;
    }
    return db;
}
/** Translate python list of tuples to Config
 *
 * [(K,V)] -> Config
 * float -> double
 * str -> string
 * [[()]] -> vector<Config> (recurse)
 * ndarray -> vector<double>
 * TODO: [0.0] -> vector<double>
 *
 * Throws std::runtime_error / std::invalid_argument on malformed input;
 * PyRef<> guards keep all temporary Python references exception-safe.
 * 'depth' limits recursion through nested lists of dicts.
 */
void List2Config(Config& ret, PyObject *list, unsigned depth)
{
    if(depth>3)
        throw std::runtime_error("too deep for Dict2Config");

    PyRef<> iter(PyObject_GetIter(list));
    while(true) {
        PyObject *item = PyIter_Next(iter.py());
        if(!item)
            break;                      // iteration exhausted (or error pending)
        PyRef<> itemref(item);          // own the item for this iteration

        const char *kname;
        PyObject *value;
        if(!PyArg_ParseTuple(item, "sO", &kname, &value))
            throw std::runtime_error("list item is not a tuple?");

        if(PyArray_Check(value)) { // array as vector<double>
            // Force a contiguous double array (rank 0..2), then copy out.
            PyRef<> arr(PyArray_ContiguousFromAny(value, NPY_DOUBLE, 0, 2));
            double *buf = (double*)PyArray_DATA(arr.py());
            std::vector<double> temp(PyArray_SIZE(arr.py()));
            std::copy(buf, buf+temp.size(), temp.begin());
            ret.swap<std::vector<double> >(kname, temp);

        } else if(PyNumber_Check(value)) { // scalar as double
            PyRef<> dval(PyNumber_Float(value));
            double val = PyFloat_AsDouble(dval.py());
            ret.set<double>(kname, val);

        } else if(PyUnicode_Check(value) || (PY_MAJOR_VERSION < 3 && PyBytes_Check(value))) { // string
            PyRef<> valref(value, borrow());
            PyCString sval(valref);
            const char *val = sval.c_str();
            ret.set<std::string>(kname, val);

        } else if(PySequence_Check(value)) { // list of dict
            Py_ssize_t N = PySequence_Size(value);
            Config::vector_t output;
            output.reserve(N);

            for(Py_ssize_t i=0; i<N; i++) {
                PyRef<> elem(PySequence_GetItem(value, i));

                // Dicts are normalized to their items() list before recursing.
                if(PyDict_Check(elem.py())) {
                    elem.reset(PyMapping_Items(elem.py()));
                }
                if(!PyList_Check(elem.py())) {
                    PyTypeObject *valuetype = (PyTypeObject*)PyObject_Type(elem.py());
                    throw std::invalid_argument(SB()<<"lists must contain only dict or list of tuples, not "<<valuetype->tp_name);
                }

                output.push_back(ret.new_scope());

                List2Config(output.back(), elem.py(), depth+1); // inherit parent scope
            }

            ret.set<Config::vector_t>(kname, output);

        } else {
            PyTypeObject *valuetype = (PyTypeObject*)PyObject_Type(value);
            throw std::invalid_argument(SB()<<"Must be a dict, not "<<valuetype->tp_name);
        }
    }
}
/* Parse GLPS input into a Config.
 *
 * 'config' may be a file-like object (read() is called), a byte buffer,
 * a py3 str, a dict, or a list of (key, value) tuples.  'path' overrides
 * the source path used for relative includes; 'extra' is an optional
 * dict of number/string variables pre-set on the parser.
 *
 * Error reporting is mixed by design: Python-level argument errors set a
 * Python exception and return NULL, while the final type mismatch throws
 * std::invalid_argument for an enclosing CATCH()-style translator.
 */
Config* PyGLPSParse2Config(PyObject *, PyObject *args, PyObject *kws)
{
    PyObject *conf = NULL, *extra_defs = Py_None;
    const char *path = NULL;
    const char *pnames[] = {"config", "path", "extra", NULL};
    if(!PyArg_ParseTupleAndKeywords(args, kws, "O|zO", (char**)pnames,
                                    &conf, &path, &extra_defs))
        return NULL;

    GLPSParser parser;

    if(extra_defs==Py_None) {
        // no-op
    } else if(PyDict_Check(extra_defs)) {
        // Pre-seed parser variables from the 'extra' dict.
        PyObject *key, *value;
        Py_ssize_t pos = 0;
        while(PyDict_Next(extra_defs, &pos, &key, &value)) {
            PyRef<> keyx(key, borrow());
            PyCString keystr(keyx);

            Config::value_t curval;

            if(PyNumber_Check(value)) {
                PyRef<> pyf(PyNumber_Float(value));
                curval = PyFloat_AsDouble(pyf.py());

            } else if(PyString_Check(value)) {
                PyRef<> valuex(value, borrow());
                PyCString valstr(valuex);
                curval = valstr.c_str();

            } else {
                PyErr_SetString(PyExc_ValueError, "extra {} can contain only numbers or strings");
                return NULL;
            }

            parser.setVar(keystr.c_str(), curval);
        }
    } else {
        PyErr_SetString(PyExc_ValueError, "'extra' must be a dict");
        return NULL;
    }

    PyGetBuf buf;
    std::auto_ptr<Config> C;

    PyRef<> listref;    // keeps a dict's items() list alive while parsing

    if(PyObject_HasAttrString(conf, "read")) { // file-like
        PyCString pyname;

        // Derive an include path from the file's name when not given.
        // NOTE(review): the GetAttrString reference appears to be handed
        // to pydirname without an explicit decref here — confirm
        // pydirname takes ownership.
        if(!path && PyObject_HasAttrString(conf, "name")) {
            path = pyname.c_str(pydirname(PyObject_GetAttrString(conf, "name")));
        }

        PyRef<> pybytes(PyObject_CallMethod(conf, "read", ""));
        if(!buf.get(pybytes.py())) {
            PyErr_SetString(PyExc_TypeError, "read() must return a buffer");
            return NULL;
        }
        C.reset(parser.parse_byte((const char*)buf.data(), buf.size(), path));

    } else if(buf.get(conf)) {
        // Raw byte buffer.
        C.reset(parser.parse_byte((const char*)buf.data(), buf.size(), path));

#if PY_MAJOR_VERSION >= 3
    } else if(PyUnicode_Check(conf)) { // py3 str (aka unicode) doesn't implement buffer iface
        PyCString buf;
        const char *cbuf = buf.c_str(conf);

        C.reset(parser.parse_byte(cbuf, strlen(cbuf), path));
#endif
    } else {
        // Dicts are normalized to a list of (key, value) tuples first.
        if(PyDict_Check(conf)) {
            listref.reset(PyMapping_Items(conf));
            conf = listref.py();
        }
        if(PyList_Check(conf)) {
            C.reset(list2conf(conf));
        } else {
            throw std::invalid_argument("'config' must be dict, list of tuples, or byte buffer");
        }
    }

    return C.release();
}