/* Allocate a new writable buffer object whose `size` bytes of storage
 * live immediately after the object header (single allocation). */
PyObject *
PyBuffer_New(Py_ssize_t size)
{
    PyObject *result;
    PyBufferObject *buf;

    if (size < 0) {
        PyErr_SetString(PyExc_ValueError,
                        "size must be zero or positive");
        return NULL;
    }
    /* Guard the header + payload addition against overflow (unlikely). */
    if (sizeof(*buf) > PY_SSIZE_T_MAX - size)
        return PyErr_NoMemory();

    /* Inline PyObject_New: header and data share one allocation. */
    result = (PyObject *)PyObject_MALLOC(sizeof(*buf) + size);
    if (result == NULL)
        return PyErr_NoMemory();

    buf = (PyBufferObject *)PyObject_INIT(result, &PyBuffer_Type);
    buf->b_base = NULL;
    buf->b_ptr = (void *)(buf + 1);   /* data starts right after the struct */
    buf->b_size = size;
    buf->b_offset = 0;
    buf->b_readonly = 0;
    buf->b_hash = -1;                 /* hash not yet computed */
    return result;
}
PyObject * PyBuffer_New(int size) { PyObject *o; PyBufferObject * b; if (size < 0) { PyErr_SetString(PyExc_ValueError, "size must be zero or positive"); return NULL; } /* PyObject_New is inlined */ o = PyObject_MALLOC(sizeof(*b) + size); if ( o == NULL ) return PyErr_NoMemory(); b = (PyBufferObject *) PyObject_INIT(o, &PyBuffer_Type); b->b_base = NULL; b->b_ptr = (void *)(b + 1); b->b_size = size; b->b_readonly = 0; #ifdef CACHE_HASH b->b_hash = -1; #endif return o; }
/* Custom tp_alloc for PyQtGuiObject.
 * Mirrors CPython's default type allocation but additionally runs the
 * C++ constructor via placement-new, which the generic allocator
 * cannot do.  Returns a new object or NULL with MemoryError set. */
static PyObject* QtGuiObject_alloc(PyTypeObject *type, Py_ssize_t nitems) {
    PyObject *obj;
    /* note that we need to add one, for the sentinel */
    const size_t size = _PyObject_VAR_SIZE(type, nitems+1);
    /* GC-enabled types need the GC header in front of the object. */
    if (PyType_IS_GC(type))
        obj = _PyObject_GC_Malloc(size);
    else
        obj = (PyObject *)PyObject_MALLOC(size);
    if (obj == NULL)
        return PyErr_NoMemory();
    // This is why we need this custom alloc: To call the C++ constructor.
    memset(obj, '\0', size);
    new ((PyQtGuiObject*) obj) PyQtGuiObject();
    /* Heap types own a reference from each of their instances. */
    if (type->tp_flags & Py_TPFLAGS_HEAPTYPE)
        Py_INCREF(type);
    if (type->tp_itemsize == 0)
        PyObject_INIT(obj, type);
    else
        (void) PyObject_INIT_VAR((PyVarObject *)obj, type, nitems);
    /* Only start GC tracking after the object is fully initialized. */
    if (PyType_IS_GC(type))
        _PyObject_GC_TRACK(obj);
    return obj;
}
/* Allocate raw storage for a GC-aware object of `basicsize` bytes.
 * A PyGC_Head is placed in front of the object; the returned pointer
 * is to the object itself (untracked).  May trigger a collection of
 * generation 0 when its threshold is exceeded.  Returns NULL with
 * MemoryError set on failure. */
PyObject *
_PyObject_GC_Malloc(size_t basicsize)
{
    PyObject *op;
    PyGC_Head *g;
    /* Guard the header + object addition against overflow. */
    if (basicsize > PY_SSIZE_T_MAX - sizeof(PyGC_Head))
        return PyErr_NoMemory();
    g = (PyGC_Head *)PyObject_MALLOC(
        sizeof(PyGC_Head) + basicsize);
    if (g == NULL)
        return PyErr_NoMemory();
    g->gc.gc_refs = GC_UNTRACKED;
    generations[0].count++; /* number of allocated GC objects */
    /* Collect only when enabled, not already collecting, and no
     * exception is pending (collection may run arbitrary code). */
    if (generations[0].count > generations[0].threshold &&
        enabled &&
        generations[0].threshold &&
        !collecting &&
        !PyErr_Occurred()) {
        collecting = 1;
        collect_generations();
        collecting = 0;
    }
    op = FROM_GC(g);
    return op;
}
/* Re-encode the first `len` bytes of the tokenizer buffer back into the
 * source file's original encoding, for use in error messages.  Also
 * translates the 1-based error column *offset from UTF-8 positions to
 * positions in the original encoding.  Returns a PyObject_MALLOC'ed,
 * NUL-terminated string (caller frees with PyObject_FREE) or NULL if
 * no encoding is set or conversion/allocation fails. */
char *
PyTokenizer_RestoreEncoding(struct tok_state* tok, int len, int *offset)
{
    char *text = NULL;
    if (tok->encoding) {
        /* convert source to original encoding */
        PyObject *lineobj = dec_utf8(tok->encoding, tok->buf, len);
        if (lineobj != NULL) {
            int linelen = PyString_Size(lineobj);
            const char *line = PyString_AsString(lineobj);
            /* BUGFIX: only allocate once `line` is known to be valid.
             * Previously, if PyString_AsString failed, an allocated but
             * uninitialized buffer was returned to the caller. */
            if (line != NULL) {
                text = PyObject_MALLOC(linelen + 1);
                if (text != NULL) {
                    if (linelen)
                        strncpy(text, line, linelen);
                    text[linelen] = '\0';
                }
            }
            Py_DECREF(lineobj);
            /* adjust error offset */
            if (*offset > 1) {
                PyObject *offsetobj = dec_utf8(tok->encoding,
                                               tok->buf, *offset-1);
                if (offsetobj) {
                    *offset = PyString_Size(offsetobj) + 1;
                    Py_DECREF(offsetobj);
                }
            }
        }
    }
    return text;
}
/* Allocate and initialize a fixed-size object of type `tp`.
 * Returns NULL with MemoryError set on allocation failure. */
PyObject *
_PyObject_New(PyTypeObject *tp)
{
    PyObject *obj = (PyObject *)PyObject_MALLOC(_PyObject_SIZE(tp));
    if (obj == NULL)
        return PyErr_NoMemory();
    return PyObject_INIT(obj, tp);
}
/* Allocate and initialize a variable-size object of type `tp` with
 * room for `nitems` items.  NULL with MemoryError set on failure. */
PyVarObject *
_PyObject_NewVar(PyTypeObject *tp, int nitems)
{
    const size_t nbytes = _PyObject_VAR_SIZE(tp, nitems);
    PyVarObject *obj = (PyVarObject *)PyObject_MALLOC(nbytes);
    if (obj == NULL)
        return (PyVarObject *)PyErr_NoMemory();
    return PyObject_INIT_VAR(obj, tp, nitems);
}
/* Efficiently allocate a python string of a fixed size containing
 * uninitialized memory; the caller is expected to fill ob_sval.
 * Returns NULL (no exception set here) on allocation failure. */
static PyObject *
lowLevelStringAlloc(Py_ssize_t size)
{
    PyStringObject *str =
        (PyStringObject *)PyObject_MALLOC(sizeof(PyStringObject) + size);
    if (str == NULL)
        return NULL;
    PyObject_INIT_VAR(str, &PyString_Type, size);
    str->ob_shash = -1;                    /* hash not yet computed */
    str->ob_sstate = SSTATE_NOT_INTERNED;
    return (PyObject *)str;
}
/* Create a new thunk object wrapping `operation` with the given
 * cardinality information; delegates field setup to the in-place
 * initializer.  NULL with MemoryError set on failure. */
PyObject*
PyThunk_FromOperation(PyObject *operation, ssize_t cardinality,
                      int cardinality_type, int type)
{
    PyThunkObject *result =
        (PyThunkObject *)PyObject_MALLOC(sizeof(PyThunkObject));
    if (result == NULL)
        return PyErr_NoMemory();
    PyObject_Init((PyObject*)result, &PyThunk_Type);
    PyThunk_FromOperation_Inplace(result, operation, cardinality,
                                  cardinality_type, type);
    return (PyObject*)result;
}
/* Build a unary-pipeline operation node holding `function` and an
 * owned reference to `left`.  NULL with MemoryError set on failure. */
PyObject*
PyThunkUnaryPipeline_FromFunction(UnaryPipelineFunction function,
                                  PyObject *left)
{
    PyThunkOperation_UnaryPipeline *node =
        PyObject_MALLOC(sizeof(PyThunkOperation_UnaryPipeline));
    if (node == NULL)
        return PyErr_NoMemory();
    PyObject_Init((PyObject*)node, &PyThunkUnaryPipeline_Type);
    node->function = function;
    Py_XINCREF(left);       /* own a reference to the operand */
    node->left = left;
    return (PyObject*)node;
}
/* Allocate a fresh, childless parse-tree node of the given token type.
 * Returns NULL on allocation failure (no Python exception is set). */
node *
PyNode_New(int type)
{
    node *result = (node *)PyObject_MALLOC(sizeof(node));
    if (result == NULL)
        return NULL;
    result->n_type = type;
    result->n_str = NULL;
    result->n_lineno = 0;
    result->n_nchildren = 0;
    result->n_child = NULL;
    return result;
}
/* Wrap a C-level Py_complex value in a new PyComplexObject.
 * NULL with MemoryError set on allocation failure. */
PyObject *
PyComplex_FromCComplex(Py_complex cval)
{
    /* Inline PyObject_New */
    PyComplexObject *result =
        (PyComplexObject *)PyObject_MALLOC(sizeof(PyComplexObject));
    if (result == NULL)
        return PyErr_NoMemory();
    PyObject_INIT(result, &PyComplex_Type);
    result->cval = cval;
    return (PyObject *)result;
}
/* sq_repeat implementation: return `a` repeated `n` times.
 * Negative `n` is treated as 0.  Raises OverflowError if the result
 * length overflows, MemoryError on allocation failure. */
PyObject*
string_repeat(register PyStringObject *a, register Py_ssize_t n)
{
    register Py_ssize_t i;
    register Py_ssize_t j;
    register Py_ssize_t size;
    register PyStringObject *op;
    size_t nbytes;
    if (n < 0)
        n = 0;
    /* watch out for overflows:  the size can overflow int,
     * and the # of bytes needed can overflow size_t
     */
    size = Py_SIZE(a) * n;
    if (n && size / n != Py_SIZE(a)) {
        PyErr_SetString(PyExc_OverflowError,
                        "repeated string is too long");
        return NULL;
    }
    /* Result identical to `a` (n == 1, or a empty): reuse it when it is
     * an exact str (subclasses must still get a fresh object). */
    if (size == Py_SIZE(a) && PyString_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    nbytes = (size_t)size;
    /* Check that header + payload does not wrap around size_t. */
    if (nbytes + PyStringObject_SIZE <= nbytes) {
        PyErr_SetString(PyExc_OverflowError,
                        "repeated string is too long");
        return NULL;
    }
    op = (PyStringObject *)PyObject_MALLOC(PyStringObject_SIZE + nbytes);
    if (op == NULL)
        return PyErr_NoMemory();
    PyObject_INIT_VAR(op, &PyString_Type, size);
    op->ob_shash = -1;
    op->ob_sstate = SSTATE_NOT_INTERNED;
    op->ob_sval[size] = '\0';
    /* Single-character source: one memset does the whole job. */
    if (Py_SIZE(a) == 1 && n > 0) {
        memset(op->ob_sval, a->ob_sval[0], n);
        return (PyObject *) op;
    }
    /* General case: copy one instance, then repeatedly double the
     * filled prefix by copying it onto itself (O(log n) memcpy calls). */
    i = 0;
    if (i < size) {
        Py_MEMCPY(op->ob_sval, a->ob_sval, Py_SIZE(a));
        i = Py_SIZE(a);
    }
    while (i < size) {
        j = (i <= size-i)  ?  i  :  size-i;
        Py_MEMCPY(op->ob_sval+i, op->ob_sval, j);
        i += j;
    }
    return (PyObject *) op;
}
/* Allocate a zero-filled Context bound to `elementType`.
 * Returns NULL with MemoryError set on allocation failure.
 * NOTE(review): `element` is stored without Py_INCREF — presumably the
 * caller keeps the reference alive; confirm against callers. */
static Context *
Context_New(PyObject *elementType)
{
    Context *ctx = (Context *)PyObject_MALLOC(sizeof(Context));
    if (ctx == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(ctx, 0, sizeof(Context));
    ctx->element = elementType;
    return ctx;
}
bitset newbitset(int nbits) { int nbytes = NBYTES(nbits); bitset ss = (char *)PyObject_MALLOC(sizeof(BYTE) * nbytes); if (ss == NULL) Py_FatalError("no mem for bitset"); ss += nbytes; while (--nbytes >= 0) *--ss = 0; return ss; }
static nfagrammar * newnfagrammar(void) { nfagrammar *gr; gr = (nfagrammar *)PyObject_MALLOC(sizeof(nfagrammar)); if (gr == NULL) Py_FatalError("no mem for new nfa grammar"); gr->gr_nnfas = 0; gr->gr_nfa = NULL; gr->gr_ll.ll_nlabels = 0; gr->gr_ll.ll_label = NULL; addlabel(&gr->gr_ll, ENDMARKER, "EMPTY"); return gr; }
node * PyNode_New(int type) { node *n = (node *) PyObject_MALLOC(1 * sizeof(node)); if (n == NULL) return NULL; n->n_type = type; n->n_str = NULL; n->n_lineno = 0; n->n_nchildren = 0; n->n_child = NULL; // Print statement modification: Initialize flag n->n_wasModified = false; return n; }
/* Allocate an empty DFA grammar whose start symbol is `start`.
 * Aborts via Py_FatalError on allocation failure. */
grammar *
newgrammar(int start)
{
    grammar *result = (grammar *)PyObject_MALLOC(sizeof(grammar));
    if (result == NULL)
        Py_FatalError("no mem for new grammar");
    result->g_start = start;
    result->g_ndfas = 0;
    result->g_dfa = NULL;
    result->g_ll.ll_nlabels = 0;
    result->g_ll.ll_label = NULL;
    result->g_accel = 0;    /* accelerators not yet computed */
    return result;
}
static nfa * newnfa(char *name) { nfa *nf; static int type = NT_OFFSET; /* All types will be disjunct */ nf = (nfa *)PyObject_MALLOC(sizeof(nfa)); if (nf == NULL) Py_FatalError("no mem for new nfa"); nf->nf_type = type++; nf->nf_name = name; /* XXX strdup(name) ??? */ nf->nf_nstates = 0; nf->nf_state = NULL; nf->nf_start = nf->nf_finish = -1; return nf; }
/* Build a string object from `size` bytes at `str` (may contain NULs;
 * `str` may be NULL to leave the buffer uninitialized).  The empty
 * string and all one-character strings are interned and cached for
 * sharing.  Returns NULL on allocation failure (no exception set). */
PyObject *
PyString_FromStringAndSize(const char *str, int size)
{
    LOG("> PyString_FromStringAndSize\n");
    {
        register PyStringObject *op;
        /* Fast paths: reuse the cached empty / one-char strings. */
        if (size == 0 && (op = nullstring) != NULL) {
            Py_INCREF(op);
            LOG("< PyString_FromStringAndSize\n");
            return (PyObject *)op;
        }
        if (size == 1 && str != NULL &&
            (op = characters[*str & UCHAR_MAX]) != NULL) {
            Py_INCREF(op);
            LOG("< PyString_FromStringAndSize\n");
            return (PyObject *)op;
        }
        /* Inline PyObject_NewVar */
        op = (PyStringObject *)
            PyObject_MALLOC(sizeof(PyStringObject) + size * sizeof(char));
        if (op == NULL)
            return NULL; /* NO MEM */
        PyObject_INIT_VAR(op, &PyString_Type, size);
        op->ob_shash = -1;
        op->ob_sstate = SSTATE_NOT_INTERNED;
        if (str != NULL)
            memcpy(op->ob_sval, str, size);
        op->ob_sval[size] = '\0';
        /* share short strings: intern and populate the caches so the
         * fast paths above hit next time. */
        if (size == 0) {
            PyObject *t = (PyObject *)op;
            PyString_InternInPlace(&t);
            op = (PyStringObject *)t;
            nullstring = op;
            Py_INCREF(op);       /* extra ref held by the cache */
        } else if (size == 1 && str != NULL) {
            PyObject *t = (PyObject *)op;
            PyString_InternInPlace(&t);
            op = (PyStringObject *)t;
            characters[*str & UCHAR_MAX] = op;
            Py_INCREF(op);       /* extra ref held by the cache */
        }
        LOG("< PyString_FromStringAndSize\n");
        return (PyObject *) op;
    }
}
/* Make a shallow copy of `original`: every field is copied verbatim.
 * NULL with MemoryError set on allocation failure.
 * NOTE(review): storage/operation/blockmask pointers are copied without
 * Py_INCREF — this assumes either they are not owned references or the
 * caller fixes up ownership; confirm against PyThunk_Type's dealloc. */
PyObject*
PyThunk_Copy(PyThunkObject *original)
{
    register PyThunkObject *thunk;
    thunk = (PyThunkObject *)PyObject_MALLOC(sizeof(PyThunkObject));
    if (thunk == NULL)
        return PyErr_NoMemory();
    PyObject_Init((PyObject*)thunk, &PyThunk_Type);
    thunk->storage = original->storage;
    thunk->evaluated = original->evaluated;
    thunk->operation = original->operation;
    thunk->cardinality = original->cardinality;
    thunk->type = original->type;
    thunk->options = original->options;
    thunk->blockmask = original->blockmask;
    return (PyObject*)thunk;
}
/* Build a string object from a NUL-terminated C string.  The empty
 * string and all one-character strings are interned and cached for
 * sharing.  Aborts via Py_FatalError on allocation failure. */
PyObject *
PyString_FromString(const char *str)
{
    register size_t size;
    register PyStringObject *op;
    size = strlen(str);
    /* Fast paths: reuse the cached empty / one-char strings. */
    if (size == 0 && (op = nullstring) != NULL) {
        Py_INCREF(op);
        return (PyObject *)op;
    }
    if (size == 1 && (op = characters[*str & UCHAR_MAX]) != NULL) {
        Py_INCREF(op);
        return (PyObject *)op;
    }
    op = (PyStringObject *)
        PyObject_MALLOC(sizeof(PyStringObject) + size * sizeof(char));
    if (op == NULL)
        Py_FatalError("no memory");
    PyObject_INIT_VAR(op, &PyString_Type, size);
    op->ob_shash = -1;
    op->ob_sstate = SSTATE_NOT_INTERNED;
    /* size+1 copies the terminating NUL as well. */
    memcpy(op->ob_sval, str, size+1);
    /* Intern short strings and populate the caches so the fast paths
     * above hit next time; the cache holds one extra reference. */
    if (size == 0) {
        PyObject *t = (PyObject *)op;
        PyString_InternInPlace(&t);
        op = (PyStringObject *)t;
        nullstring = op;
        Py_INCREF(op);
    } else if (size == 1) {
        PyObject *t = (PyObject *)op;
        PyString_InternInPlace(&t);
        op = (PyStringObject *)t;
        characters[*str & UCHAR_MAX] = op;
        Py_INCREF(op);
    }
    return (PyObject *) op;
}
/* Create an already-evaluated thunk wrapping a copy of the given NumPy
 * array.  `unused` is the ignored module/self argument of the Python-
 * level entry point.  Raises TypeError if the input cannot be converted
 * to an exact ndarray, MemoryError on allocation failure. */
PyObject*
PyThunk_FromArray(PyObject *unused, PyObject *input)
{
    register PyThunkObject *thunk;
    (void) unused;
    /* PyArray_FromAny returns a NEW reference (a forced copy here). */
    input = PyArray_FromAny(input, NULL, 0, 0, NPY_ARRAY_ENSURECOPY, NULL);
    if (input == NULL || !PyArray_CheckExact(input)) {
        /* BUGFIX: drop the converted array on the non-exact path;
         * previously it leaked. */
        Py_XDECREF(input);
        PyErr_SetString(PyExc_TypeError,
                        "Expected a NumPy array as parameter.");
        return NULL;
    }
    thunk = (PyThunkObject *)PyObject_MALLOC(sizeof(PyThunkObject));
    if (thunk == NULL) {
        /* BUGFIX: don't leak the copied array on OOM. */
        Py_DECREF(input);
        return PyErr_NoMemory();
    }
    PyObject_Init((PyObject*)thunk, &PyThunk_Type);
    /* The thunk takes ownership of the copied array reference. */
    thunk->storage = (PyArrayObject*) input;
    thunk->evaluated = true;
    thunk->operation = NULL;
    thunk->cardinality = PyArray_SIZE(thunk->storage);
    thunk->type = PyArray_TYPE(thunk->storage);
    thunk->options = THUNK_CARDINALITY_EXACT;
    thunk->blockmask = NULL;
    return (PyObject*)thunk;
}
/* Parse the input from tokenizer `tok` with grammar `g`, starting at
 * nonterminal `start`.  On success returns the parse tree (possibly
 * wrapped in an encoding_decl node); on failure returns NULL and fills
 * `err_ret`.  Always frees `tok` before returning. */
static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_BARRY_AS_BDFL)
        ps->p_flags |= CO_FUTURE_BARRY_AS_BDFL;
#endif

    /* Main loop: pull tokens and feed them to the parser until done
     * or an error occurs. */
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent && !(*flags & PyPARSE_DONT_IMPLY_DEDENT)) {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        /* Token text is copied; the parser takes ownership of `str`
         * on a successful PyParser_AddToken. */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
        /* Enforce the (in)equality spelling mandated by the
         * barry_as_FLUFL future import. */
        if (type == NOTEQUAL) {
            if (!(ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                strcmp(str, "!=")) {
                PyObject_FREE(str);
                err_ret->error = E_SYNTAX;
                break;
            }
            else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                     strcmp(str, "<>")) {
                PyObject_FREE(str);
                err_ret->text = "with Barry as BDFL, use '<>' "
                                "instead of '!='";
                err_ret->error = E_SYNTAX;
                break;
            }
        }
#endif
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
#ifndef PGEN
        /* Check that the source for a single input statement really
           is a single statement by looking at what is left in the
           buffer after parsing.  Trailing whitespace and comments
           are OK. */
        if (start == single_input) {
            char *cur = tok->cur;
            char c = *tok->cur;
            for (;;) {
                while (c == ' ' || c == '\t' || c == '\n' || c == '\014')
                    c = *++cur;
                if (!c)
                    break;
                if (c != '#') {
                    err_ret->error = E_BADSINGLE;
                    PyNode_Free(n);
                    n = NULL;
                    break;
                }
                /* Suck up comment. */
                while (c && c != '\n')
                    c = *++cur;
            }
        }
#endif
    }
    else
        n = NULL;
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    *flags = ps->p_flags;
#endif
    PyParser_Delete(ps);

    if (n == NULL) {
        /* Failure: record line/offset and a copy of the offending
         * source text in err_ret for error reporting. */
        if (tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
            err_ret->text = (char *) PyObject_MALLOC(len + 1);
            if (err_ret->text != NULL) {
                if (len > 0)
                    strncpy(err_ret->text, tok->buf, len);
                err_ret->text[len] = '\0';
            }
        }
    }
    else if (tok->encoding != NULL) {
        /* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was
         * allocated using PyMem_ -- so copy the encoding string into
         * a PyObject_-allocated buffer before handing it to the node. */
        node* r = PyNode_New(encoding_decl);
        if (r)
            r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1);
        if (!r || !r->n_str) {
            err_ret->error = E_NOMEM;
            if (r)
                PyObject_FREE(r);
            n = NULL;
            goto done;
        }
        strcpy(r->n_str, tok->encoding);
        PyMem_FREE(tok->encoding);
        tok->encoding = NULL;
        /* Wrap the tree in an encoding_decl node. */
        r->n_nchildren = 1;
        r->n_child = n;
        n = r;
    }

done:
    PyTokenizer_Free(tok);
    return n;
}
/* Create a new execution frame for `code`, reusing a frame from the
 * free list when possible.  `locals` may be NULL.  Returns a new
 * reference, or NULL with an exception set. */
PyFrameObject *
PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals,
            PyObject *locals)
{
    PyFrameObject *back = tstate->frame;
    static PyObject *builtin_object;   /* interned "__builtins__" */
    PyFrameObject *f;
    PyObject *builtins;
    int extras;                        /* value stack + locals slots */

    if (builtin_object == NULL) {
        builtin_object = PyString_InternFromString("__builtins__");
        if (builtin_object == NULL)
            return NULL;
    }
    /* Sanity-check all arguments before touching anything. */
    if ((back != NULL && !PyFrame_Check(back)) ||
        code == NULL || !PyCode_Check(code) ||
        globals == NULL || !PyDict_Check(globals) ||
        (locals != NULL && !PyDict_Check(locals))) {
        PyErr_BadInternalCall();
        return NULL;
    }
    extras = code->co_stacksize + code->co_nlocals;
    if (back == NULL || back->f_globals != globals) {
        builtins = PyDict_GetItem(globals, builtin_object);
        if (builtins != NULL && PyModule_Check(builtins))
            builtins = PyModule_GetDict(builtins);
    }
    else {
        /* If we share the globals, we share the builtins.
           Save a lookup and a call. */
        builtins = back->f_builtins;
    }
    if (builtins != NULL && !PyDict_Check(builtins))
        builtins = NULL;
    if (free_list == NULL) {
        /* PyObject_New is inlined; localsplus is allocated in-line
         * after the frame struct. */
        f = (PyFrameObject *)
            PyObject_MALLOC(sizeof(PyFrameObject) +
                            extras*sizeof(PyObject *));
        if (f == NULL)
            return (PyFrameObject *)PyErr_NoMemory();
        PyObject_INIT(f, &PyFrame_Type);
    }
    else {
        /* Reuse a frame from the free list, growing it if the new
         * code object needs more slots than the old one had. */
        f = free_list;
        free_list = free_list->f_back;
        if (f->f_nlocals + f->f_stacksize < extras) {
            f = (PyFrameObject *)
                PyObject_REALLOC(f, sizeof(PyFrameObject) +
                                 extras*sizeof(PyObject *));
            if (f == NULL)
                return (PyFrameObject *)PyErr_NoMemory();
        }
        else
            extras = f->f_nlocals + f->f_stacksize;
        PyObject_INIT(f, &PyFrame_Type);
    }
    if (builtins == NULL) {
        /* No builtins!  Make up a minimal one; give them 'None',
         * at least. */
        builtins = PyDict_New();
        if (builtins == NULL ||
            PyDict_SetItemString(builtins, "None", Py_None) < 0) {
            Py_DECREF(f);
            return NULL;
        }
    }
    else
        Py_XINCREF(builtins);
    f->f_builtins = builtins;
    Py_XINCREF(back);
    f->f_back = back;
    Py_INCREF(code);
    f->f_code = code;
    Py_INCREF(globals);
    f->f_globals = globals;
    if (code->co_flags & CO_NEWLOCALS) {
        if (code->co_flags & CO_OPTIMIZED)
            locals = NULL; /* Let fast_2_locals handle it */
        else {
            locals = PyDict_New();
            if (locals == NULL) {
                Py_DECREF(f);
                return NULL;
            }
        }
    }
    else {
        if (locals == NULL)
            locals = globals;
        Py_INCREF(locals);
    }
    f->f_locals = locals;
    f->f_trace = NULL;
    f->f_exc_type = f->f_exc_value = f->f_exc_traceback = NULL;
    f->f_tstate = tstate;
    f->f_lasti = 0;
    f->f_lineno = code->co_firstlineno;
    f->f_restricted = (builtins != tstate->interp->builtins);
    f->f_iblock = 0;
    f->f_nlocals = code->co_nlocals;
    f->f_stacksize = extras - code->co_nlocals;
    /* Clear every localsplus slot; the value stack begins after the
     * locals. */
    while (--extras >= 0)
        f->f_localsplus[extras] = NULL;
    f->f_valuestack = f->f_localsplus + f->f_nlocals;
    return f;
}
/* Convert the NFA `nf` into the DFA `d` using the classic subset
 * construction, then minimize it.  xx_state is the growing array of
 * subset states; each subset is a bitset over NFA states. */
static void
makedfa(nfagrammar *gr, nfa *nf, dfa *d)
{
    int nbits = nf->nf_nstates;
    bitset ss;
    int xx_nstates;
    ss_state *xx_state, *yy;
    ss_arc *zz;
    int istate, jstate, iarc, jarc, ibit;
    nfastate *st;
    nfaarc *ar;

    /* Initial subset state: epsilon-closure of the NFA start state. */
    ss = newbitset(nbits);
    addclosure(ss, nf, nf->nf_start);
    xx_state = (ss_state *)PyObject_MALLOC(sizeof(ss_state));
    if (xx_state == NULL)
        Py_FatalError("no mem for xx_state in makedfa");
    xx_nstates = 1;
    yy = &xx_state[0];
    yy->ss_ss = ss;
    yy->ss_narcs = 0;
    yy->ss_arc = NULL;
    yy->ss_deleted = 0;
    yy->ss_finish = testbit(ss, nf->nf_finish);
    if (yy->ss_finish)
        printf("Error: nonterminal '%s' may produce empty.\n",
               nf->nf_name);

    /* This algorithm is from a book written before
       the invention of structured programming... */

    /* For each unmarked state... (xx_nstates may grow inside the loop
       as new subset states are discovered) */
    for (istate = 0; istate < xx_nstates; ++istate) {
        size_t size;
        yy = &xx_state[istate];
        ss = yy->ss_ss;
        /* For all its states... */
        for (ibit = 0; ibit < nf->nf_nstates; ++ibit) {
            if (!testbit(ss, ibit))
                continue;
            st = &nf->nf_state[ibit];
            /* For all non-empty arcs from this state... */
            for (iarc = 0; iarc < st->st_narcs; iarc++) {
                ar = &st->st_arc[iarc];
                if (ar->ar_label == EMPTY)
                    continue;
                /* Look up in list of arcs from this state */
                for (jarc = 0; jarc < yy->ss_narcs; ++jarc) {
                    zz = &yy->ss_arc[jarc];
                    if (ar->ar_label == zz->sa_label)
                        goto found;
                }
                /* Add new arc for this state */
                size = sizeof(ss_arc) * (yy->ss_narcs + 1);
                yy->ss_arc = (ss_arc *)PyObject_REALLOC(
                    yy->ss_arc, size);
                if (yy->ss_arc == NULL)
                    Py_FatalError("out of mem");
                zz = &yy->ss_arc[yy->ss_narcs++];
                zz->sa_label = ar->ar_label;
                zz->sa_bitset = newbitset(nbits);
                zz->sa_arrow = -1;
            found:     ;
                /* Add destination (epsilon-closure of the target). */
                addclosure(zz->sa_bitset, nf, ar->ar_arrow);
            }
        }
        /* Now look up all the arrow states: find an existing subset
           state with the same bitset or append a new one. */
        for (jarc = 0; jarc < xx_state[istate].ss_narcs; jarc++) {
            zz = &xx_state[istate].ss_arc[jarc];
            for (jstate = 0; jstate < xx_nstates; jstate++) {
                if (samebitset(zz->sa_bitset,
                               xx_state[jstate].ss_ss, nbits)) {
                    zz->sa_arrow = jstate;
                    goto done;
                }
            }
            size = sizeof(ss_state) * (xx_nstates + 1);
            xx_state = (ss_state *)PyObject_REALLOC(xx_state, size);
            if (xx_state == NULL)
                Py_FatalError("out of mem");
            zz->sa_arrow = xx_nstates;
            yy = &xx_state[xx_nstates++];
            yy->ss_ss = zz->sa_bitset;
            yy->ss_narcs = 0;
            yy->ss_arc = NULL;
            yy->ss_deleted = 0;
            yy->ss_finish = testbit(yy->ss_ss, nf->nf_finish);
        done:      ;
        }
    }

    if (Py_DebugFlag)
        printssdfa(xx_nstates, xx_state, nbits, &gr->gr_ll,
                   "before minimizing");
    simplify(xx_nstates, xx_state);
    if (Py_DebugFlag)
        printssdfa(xx_nstates, xx_state, nbits, &gr->gr_ll,
                   "after minimizing");
    convert(d, xx_nstates, xx_state);
    /* XXX cleanup */
    PyObject_FREE(xx_state);
}
/* Parse input from tokenizer `tok` with grammar `g` (typed_ast / Ta27
 * variant).  Collects TYPE_IGNORE comment lines and attaches them to
 * the file_input ENDMARKER.  Returns the parse tree or NULL with
 * `err_ret` filled.  Always frees `tok` before returning. */
static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0;
    growable_int_array type_ignores;   /* line numbers of type: ignore */

    if (!growable_int_array_init(&type_ignores, 10)) {
        err_ret->error = E_NOMEM;
        Ta27Tokenizer_Free(tok);
        return NULL;
    }

    if ((ps = Ta27Parser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        Ta27Tokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_PRINT_IS_FUNCTION) {
        ps->p_flags |= CO_FUTURE_PRINT_FUNCTION;
    }
    if (*flags & PyPARSE_UNICODE_LITERALS) {
        ps->p_flags |= CO_FUTURE_UNICODE_LITERALS;
    }
#endif

    /* Main loop: pull tokens and feed them to the parser. */
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = Ta27Tokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent && !(*flags & PyPARSE_DONT_IMPLY_DEDENT)) {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#endif
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

        /* Record "# type: ignore" comments separately instead of
         * feeding them to the parser.
         * NOTE(review): `str` is not freed on this path — looks like
         * a small leak per TYPE_IGNORE token; confirm upstream. */
        if (type == TYPE_IGNORE) {
            if (!growable_int_array_add(&type_ignores, tok->lineno)) {
                err_ret->error = E_NOMEM;
                break;
            }
            continue;
        }

        if ((err_ret->error =
             Ta27Parser_AddToken(ps, (int)type, str, tok->lineno,
                                 col_offset,
                                 &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;

        if (n->n_type == file_input) {
            /* Put type_ignore nodes in the ENDMARKER of file_input. */
            int num;
            node *ch;
            size_t i;

            num = NCH(n);
            ch = CHILD(n, num - 1);
            REQ(ch, ENDMARKER);

            for (i = 0; i < type_ignores.num_items; i++) {
                Ta27Node_AddChild(ch, TYPE_IGNORE, NULL,
                                  type_ignores.items[i], 0);
            }
        }
        /* NOTE(review): the array is only deallocated on this success
         * path — error paths appear to leak it; confirm upstream. */
        growable_int_array_deallocate(&type_ignores);
    }
    else
        n = NULL;
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    *flags = ps->p_flags;
#endif
    Ta27Parser_Delete(ps);

    if (n == NULL) {
        /* Failure: record line/offset and (re-encoded) source text. */
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            char *text = NULL;
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
#ifdef Py_USING_UNICODE
            text = Ta27Tokenizer_RestoreEncoding(tok, len,
                                                 &err_ret->offset);
#endif
            if (text == NULL) {
                text = (char *) PyObject_MALLOC(len + 1);
                if (text != NULL) {
                    if (len > 0)
                        strncpy(text, tok->buf, len);
                    text[len] = '\0';
                }
            }
            err_ret->text = text;
        }
    }
    else if (tok->encoding != NULL) {
        /* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was
         * allocated using PyMem_ -- copy it into a PyObject_ buffer
         * before wrapping the tree in an encoding_decl node. */
        node* r = Ta27Node_New(encoding_decl);
        if (r)
            r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1);
        if (!r || !r->n_str) {
            err_ret->error = E_NOMEM;
            if (r)
                PyObject_FREE(r);
            n = NULL;
            goto done;
        }
        strcpy(r->n_str, tok->encoding);
        PyMem_FREE(tok->encoding);
        tok->encoding = NULL;
        r->n_nchildren = 1;
        r->n_child = n;
        n = r;
    }

done:
    Ta27Tokenizer_Free(tok);
    return n;
}
/* Create a SlopNA object recording the exception triple, render its
 * traceback to a string (tracebacks aren't picklable), and log the
 * creation event to both the verbose log and the base64/pickle binary
 * log.  Returns a new reference.
 * NOTE(review): exc_type is INCREF'd unconditionally — a NULL exc_type
 * would crash here despite the "could be NULL" comment; confirm that
 * callers always pass a non-NULL type. */
PyObject* SlopNA_New(PyObject* exc_type, PyObject* exc_value,
                     PyObject* exc_traceback) {
  assert(pg_activated);

  // TODO: use a free list like intobject and friends
  SlopNAObject* self = (SlopNAObject*)PyObject_MALLOC(sizeof(SlopNAObject));
  if (self == NULL)
    return (PyObject *)PyErr_NoMemory();
  PyObject_INIT(self, &SlopNA_Type);

  // we can directly take these 2 fields, since they're picklable
  self->exc_type = exc_type;

  // could be NULL, in which case use Py_None
  if (exc_value) {
    self->exc_value = exc_value;
  }
  else {
    self->exc_value = Py_None;
  }

  Py_INCREF(self->exc_type);
  Py_INCREF(self->exc_value);

  // unfortunately exc_traceback isn't picklable, so we'll need to
  // call PyTraceBack_Print to print the traceback into a
  // cStringIO buffer, then convert that to a string
  if (!PycStringIO) {
    PycString_IMPORT; // don't repeat imports
  }
  PyObject* buf = PycStringIO->NewOutput(128);
  /* NOTE(review): PyTraceBack_Print's return value is ignored here. */
  PyTraceBack_Print(exc_traceback, buf);
  self->exc_traceback_str = PycStringIO->cgetvalue(buf);
  Py_DECREF(buf);

  self->next_NA = NULL;

  // log this creation event in both verbose and binary logs:

  // for verbose log:
  PyObject* repr = NA_detailed_repr(self);
  PG_LOG(PyString_AsString(repr));
  Py_DECREF(repr);

  // for binary log, each line is:
  //   base64.b64encode(cPickle.dumps(context, -1))
  //
  // where context is a dict with the following fields:
  //   exc_type, exc_value, locals
  PyObject* context = PyDict_New();
  PyObject* type_repr = PyObject_Repr(self->exc_type);
  PyObject* value_repr = PyObject_Repr(self->exc_value);
  PyDict_SetItemString(context, "exc_type", type_repr);
  PyDict_SetItemString(context, "exc_value", value_repr);
  PyDict_SetItemString(context, "locals", PyEval_GetLocals());

  // pass in -1 to force cPickle to use a binary protocol
  PyObject* negative_one = PyInt_FromLong(-1);
  PyObject* pickled_context =
    PyObject_CallFunctionObjArgs(cPickle_dumpstr_func,
                                 context, negative_one, NULL);
  if (!pickled_context) {
    assert(PyErr_Occurred());
    PyErr_Clear();
    // hmmm, let's try removing locals and seeing if it's now picklable
    PyDict_DelItemString(context, "locals");
    pickled_context =
      PyObject_CallFunctionObjArgs(cPickle_dumpstr_func,
                                   context, negative_one, NULL);
  }
  if (!pickled_context) {
    assert(PyErr_Occurred());
    PyErr_Clear();
    fprintf(stderr, "ERROR: pickled_context is unpicklable\n");
    Py_Exit(1);
  }
  PyObject* encoded_line =
    PyObject_CallFunctionObjArgs(b64encode_func, pickled_context, NULL);
  fprintf(binary_log_file, "%s\n", PyString_AsString(encoded_line));
  Py_DECREF(encoded_line);
  /* NOTE(review): pickled_context is never DECREF'd — looks like a
   * reference leak; confirm upstream. */
  Py_DECREF(negative_one);
  Py_DECREF(context);
  Py_DECREF(value_repr);
  Py_DECREF(type_repr);

  return (PyObject*)self;
}
/* Parse input from tokenizer `tok` with grammar `g` (variant using
 * Py_SAFE_DOWNCAST for the column offset).
 * NOTE(review): this definition appears truncated in the file — the
 * text ends right after the token loop, with no epilogue or closing
 * brace; confirm against the original source. */
static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_BARRY_AS_BDFL)
        ps->p_flags |= CO_FUTURE_BARRY_AS_BDFL;
#endif

    /* Main loop: pull tokens and feed them to the parser. */
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent && !(*flags & PyPARSE_DONT_IMPLY_DEDENT)) {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
        /* Enforce the (in)equality spelling mandated by the
         * barry_as_FLUFL future import. */
        if (type == NOTEQUAL) {
            if (!(ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                strcmp(str, "!=")) {
                PyObject_FREE(str);
                err_ret->error = E_SYNTAX;
                break;
            }
            else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) &&
                strcmp(str, "<>")) {
                PyObject_FREE(str);
                err_ret->text = "with Barry as BDFL, use '<>' "
                                "instead of '!='";
                err_ret->error = E_SYNTAX;
                break;
            }
        }
#endif
        if (a >= tok->line_start)
            col_offset = Py_SAFE_DOWNCAST(a - tok->line_start,
                                          Py_intptr_t, int);
        else
            col_offset = -1;

        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }
/* Parse input from tokenizer `tok` with grammar `g` (Python 2 style
 * variant; hands tok->encoding directly to the encoding_decl node).
 * Returns the parse tree or NULL with `err_ret` filled.  Always frees
 * `tok` before returning. */
static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0, handling_import = 0, handling_with = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_PRINT_IS_FUNCTION) {
        ps->p_flags |= CO_FUTURE_PRINT_FUNCTION;
    }
    if (*flags & PyPARSE_UNICODE_LITERALS) {
        ps->p_flags |= CO_FUTURE_UNICODE_LITERALS;
    }
#endif

    /* Main loop: pull tokens and feed them to the parser. */
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            handling_with = handling_import = 0;
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent && !(*flags & PyPARSE_DONT_IMPLY_DEDENT)) {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#endif
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
    }
    else
        n = NULL;
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    *flags = ps->p_flags;
#endif
    PyParser_Delete(ps);

    if (n == NULL) {
        /* Failure: record line/offset and (re-encoded) source text. */
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            char *text = NULL;
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
#ifdef Py_USING_UNICODE
            text = PyTokenizer_RestoreEncoding(tok, len,
                                               &err_ret->offset);
#endif
            if (text == NULL) {
                text = (char *) PyObject_MALLOC(len + 1);
                if (text != NULL) {
                    if (len > 0)
                        strncpy(text, tok->buf, len);
                    text[len] = '\0';
                }
            }
            err_ret->text = text;
        }
    }
    else if (tok->encoding != NULL) {
        /* Wrap the tree in an encoding_decl node; this variant hands
         * ownership of tok->encoding straight to the node. */
        node* r = PyNode_New(encoding_decl);
        if (!r) {
            err_ret->error = E_NOMEM;
            n = NULL;
            goto done;
        }
        r->n_str = tok->encoding;
        r->n_nchildren = 1;
        r->n_child = n;
        tok->encoding = NULL;
        n = r;
    }

done:
    PyTokenizer_Free(tok);
    return n;
}