PyObject *
PyStructSequence_New(PyTypeObject *type)
{
    PyStructSequence *obj;
    Py_ssize_t size = REAL_SIZE_TP(type), i;

    obj = PyObject_GC_NewVar(PyStructSequence, type, size);
    if (obj == NULL)
        return NULL;
    /* Hack the size of the variable object, so invisible fields don't
       appear to Python code. */
    Py_SIZE(obj) = VISIBLE_SIZE_TP(type);
    for (i = 0; i < size; i++)
        obj->ob_item[i] = NULL;

    return (PyObject *)obj;
}
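
/* For orientation, a minimal caller-side sketch (the "example.Point" type,
 * its fields, and make_point() are hypothetical, not from the source above):
 * define a struct sequence type, then allocate an instance with
 * PyStructSequence_New() and fill the visible slots.
 * PyStructSequence_SET_ITEM() steals a reference, hence the Py_INCREFs. */

static PyStructSequence_Field point_fields[] = {
    {"x", "x coordinate"},
    {"y", "y coordinate"},
    {NULL}
};

static PyStructSequence_Desc point_desc = {
    "example.Point",        /* name */
    "A 2-D point",          /* doc */
    point_fields,           /* fields */
    2                       /* n_in_sequence: fields visible to Python */
};

static PyTypeObject PointType;  /* initialized once in module init via
                                   PyStructSequence_InitType(&PointType,
                                                             &point_desc) */

static PyObject *
make_point(PyObject *x, PyObject *y)
{
    PyObject *p = PyStructSequence_New(&PointType);
    if (p == NULL)
        return NULL;
    Py_INCREF(x);
    PyStructSequence_SET_ITEM(p, 0, x);  /* steals the new reference */
    Py_INCREF(y);
    PyStructSequence_SET_ITEM(p, 1, y);
    return p;
}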

PyObject *
ApgRecord_New(PyObject *mapping, Py_ssize_t size)
{
    ApgRecordObject *o;
    Py_ssize_t i;

    if (size < 1 || mapping == NULL) {
        PyErr_BadInternalCall();
        return NULL;
    }

    if (size < ApgRecord_MAXSAVESIZE && (o = free_list[size]) != NULL) {
        free_list[size] = (ApgRecordObject *) o->ob_item[0];
        numfree[size]--;
        _Py_NewReference((PyObject *)o);
    }
    else {
        /* Check for overflow */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(ApgRecordObject) -
                            sizeof(PyObject *)) / sizeof(PyObject *)) {
            return PyErr_NoMemory();
        }
        o = PyObject_GC_NewVar(ApgRecordObject, &ApgRecord_Type, size);
        if (o == NULL) {
            return NULL;
        }
    }

    for (i = 0; i < size; i++) {
        o->ob_item[i] = NULL;
    }

    Py_INCREF(mapping);
    o->mapping = mapping;
    o->self_hash = -1;
    _PyObject_GC_TRACK(o);
    return (PyObject *) o;
}
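
/* Sketch of the deallocation side that feeds the free list above. This is
 * modeled on CPython's tuple free list, not copied from asyncpg: the cap
 * constant ApgRecord_MAXFREELIST and the function name are assumptions.
 * Freed records of a given size are pushed onto free_list[size], reusing
 * ob_item[0] as the "next" link, which is exactly what ApgRecord_New()
 * pops off again. */
static void
record_dealloc_sketch(ApgRecordObject *o)
{
    Py_ssize_t i;
    Py_ssize_t size = Py_SIZE(o);

    PyObject_GC_UnTrack(o);
    Py_CLEAR(o->mapping);
    for (i = 0; i < size; i++) {
        Py_CLEAR(o->ob_item[i]);
    }

    if (size < ApgRecord_MAXSAVESIZE &&
        numfree[size] < ApgRecord_MAXFREELIST)
    {
        o->ob_item[0] = (PyObject *)free_list[size];
        free_list[size] = o;
        numfree[size]++;
    }
    else {
        PyObject_GC_Del(o);
    }
}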

static CThunkObject* CThunkObject_new(Py_ssize_t nArgs)
{
    CThunkObject *p;
    int i;

    p = PyObject_GC_NewVar(CThunkObject, &PyCThunk_Type, nArgs);
    if (p == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    p->pcl = NULL;
    memset(&p->cif, 0, sizeof(p->cif));
    p->converters = NULL;
    p->callable = NULL;
    p->setfunc = NULL;
    p->ffi_restype = NULL;

    for (i = 0; i < nArgs + 1; ++i)
        p->atypes[i] = NULL;

    PyObject_GC_Track((PyObject *)p);
    return p;
}

PyObject *
PyTuple_New(register Py_ssize_t size)
{
    register PyTupleObject *op;
    Py_ssize_t i;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0 && free_list[0]) {
        op = free_list[0];
        Py_INCREF(op);
#ifdef COUNT_ALLOCS
        tuple_zero_allocs++;
#endif
        return (PyObject *) op;
    }
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
#ifdef COUNT_ALLOCS
        fast_tuple_allocs++;
#endif
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        Py_ssize_t nbytes = size * sizeof(PyObject *);
        /* Check for overflow */
        if (nbytes / sizeof(PyObject *) != (size_t)size ||
            (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
                      sizeof(PyObject *)))
        {
            return PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    for (i = 0; i < size; i++)
        op->ob_item[i] = NULL;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        free_list[0] = op;
        ++numfree[0];
        Py_INCREF(op);          /* extra INCREF so that this is never freed */
    }
#endif
#ifdef SHOW_TRACK_COUNT
    count_tracked++;
#endif
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}
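
/* Caller-side sketch (make_pair() is illustrative, not part of the source
 * above): a fresh tuple's slots start out NULL, so they must be filled with
 * PyTuple_SET_ITEM(), which steals a reference, before the tuple is exposed
 * to other code. */
static PyObject *
make_pair(PyObject *first, PyObject *second)
{
    PyObject *t = PyTuple_New(2);
    if (t == NULL)
        return NULL;
    Py_INCREF(first);
    PyTuple_SET_ITEM(t, 0, first);      /* steals the new reference */
    Py_INCREF(second);
    PyTuple_SET_ITEM(t, 1, second);
    return t;
}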

static PyFrameObject *MAKE_FRAME( PyCodeObject *code, PyObject *module, bool is_module )
{
    PyTraceBack_Type.tp_dealloc = (destructor)tb_dealloc;

    assertCodeObject( code );

    PyObject *globals = ((PyModuleObject *)module)->md_dict;
    assert( PyDict_Check( globals ) );

    Py_ssize_t ncells = PyTuple_GET_SIZE( code->co_cellvars );
    Py_ssize_t nfrees = PyTuple_GET_SIZE( code->co_freevars );
    Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;

    Nuitka_FrameObject *result = PyObject_GC_NewVar( Nuitka_FrameObject, &Nuitka_Frame_Type, extras );

    if (unlikely( result == NULL ))
    {
        return NULL;
    }

    PyFrameObject *frame = &result->m_frame;

    frame->f_code = code;

    extras = code->co_nlocals + ncells + nfrees;
    frame->f_valuestack = frame->f_localsplus + extras;

    for ( Py_ssize_t i = 0; i < extras; i++ )
    {
        frame->f_localsplus[i] = NULL;
    }

    frame->f_locals = NULL;
    frame->f_trace = INCREASE_REFCOUNT( Py_None );

    frame->f_exc_type = NULL;
    frame->f_exc_value = NULL;
    frame->f_exc_traceback = NULL;

    frame->f_stacktop = frame->f_valuestack;
    frame->f_builtins = INCREASE_REFCOUNT( (PyObject *)dict_builtin );

    frame->f_back = NULL;

    frame->f_globals = INCREASE_REFCOUNT( globals );

    if (likely( (code->co_flags & CO_OPTIMIZED ) == CO_OPTIMIZED ))
    {
        frame->f_locals = NULL;
    }
    else if (is_module)
    {
        frame->f_locals = INCREASE_REFCOUNT( globals );
    }
    else
    {
        frame->f_locals = PyDict_New();

        if (unlikely( frame->f_locals == NULL ))
        {
            Py_DECREF( result );
            return NULL;
        }

        PyDict_SetItem(
            frame->f_locals,
            const_str_plain___module__,
            MODULE_NAME( module )
        );
    }

#if PYTHON_VERSION < 340
    frame->f_tstate = PyThreadState_GET();
#endif

    frame->f_lasti = -1;
    frame->f_lineno = code->co_firstlineno;
    frame->f_iblock = 0;

#if PYTHON_VERSION >= 340
    frame->f_gen = NULL;
    frame->f_executing = 0;
#endif

    Nuitka_GC_Track( result );
    return (PyFrameObject *)result;
}

PyFrameObject *
PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals,
            PyObject *locals)
{
    PyFrameObject *back = tstate->frame;
    PyFrameObject *f;
    PyObject *builtins;
    Py_ssize_t i;

#ifdef Py_DEBUG
    if (code == NULL || globals == NULL || !PyDict_Check(globals) ||
        (locals != NULL && !PyMapping_Check(locals))) {
        PyErr_BadInternalCall();
        return NULL;
    }
#endif
    if (back == NULL || back->f_globals != globals) {
        builtins = PyDict_GetItem(globals, builtin_object);
        if (builtins) {
            if (PyModule_Check(builtins)) {
                builtins = PyModule_GetDict(builtins);
                assert(!builtins || PyDict_Check(builtins));
            }
            else if (!PyDict_Check(builtins))
                builtins = NULL;
        }
        if (builtins == NULL) {
            /* No builtins!  Make up a minimal one.
               Give them 'None', at least. */
            builtins = PyDict_New();
            if (builtins == NULL ||
                PyDict_SetItemString(builtins, "None", Py_None) < 0)
                return NULL;
        }
        else
            Py_INCREF(builtins);
    }
    else {
        /* If we share the globals, we share the builtins.
           Save a lookup and a call. */
        builtins = back->f_builtins;
        assert(builtins != NULL && PyDict_Check(builtins));
        Py_INCREF(builtins);
    }
    if (code->co_zombieframe != NULL) {
        f = code->co_zombieframe;
        code->co_zombieframe = NULL;
        _Py_NewReference((PyObject *)f);
        assert(f->f_code == code);
    }
    else {
        Py_ssize_t extras, ncells, nfrees;
        ncells = PyTuple_GET_SIZE(code->co_cellvars);
        nfrees = PyTuple_GET_SIZE(code->co_freevars);
        extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
        if (free_list == NULL) {
            f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
            if (f == NULL) {
                Py_DECREF(builtins);
                return NULL;
            }
        }
        else {
            assert(numfree > 0);
            --numfree;
            f = free_list;
            free_list = free_list->f_back;
            if (f->ob_size < extras) {
                f = PyObject_GC_Resize(PyFrameObject, f, extras);
                if (f == NULL) {
                    Py_DECREF(builtins);
                    return NULL;
                }
            }
            _Py_NewReference((PyObject *)f);
        }

        f->f_code = code;
        extras = code->co_nlocals + ncells + nfrees;
        f->f_valuestack = f->f_localsplus + extras;
        for (i = 0; i < extras; i++)
            f->f_localsplus[i] = NULL;
        f->f_locals = NULL;
        f->f_trace = NULL;
        f->f_exc_type = f->f_exc_value = f->f_exc_traceback = NULL;
    }
    f->f_stacktop = f->f_valuestack;
    f->f_builtins = builtins;
    Py_XINCREF(back);
    f->f_back = back;
    Py_INCREF(code);
    Py_INCREF(globals);
    f->f_globals = globals;
    /* Most functions have CO_NEWLOCALS and CO_OPTIMIZED set. */
    if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) ==
        (CO_NEWLOCALS | CO_OPTIMIZED))
        ; /* f_locals = NULL; will be set by PyFrame_FastToLocals() */
    else if (code->co_flags & CO_NEWLOCALS) {
        locals = PyDict_New();
        if (locals == NULL) {
            Py_DECREF(f);
            return NULL;
        }
        f->f_locals = locals;
    }
    else {
        if (locals == NULL)
            locals = globals;
        Py_INCREF(locals);
        f->f_locals = locals;
    }
    f->f_tstate = tstate;

    f->f_lasti = -1;
    f->f_lineno = code->co_firstlineno;
    f->f_iblock = 0;
#ifdef STACKLESS
    f->f_execute = NULL;
#endif

    _PyObject_GC_TRACK(f);
    return f;
}
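
/* Caller-side sketch (run_code() is illustrative, not from the source
 * above): allocate a frame for a code object via PyFrame_New() and run it
 * to completion with PyEval_EvalFrame(); error handling is trimmed to the
 * minimum. */
static PyObject *
run_code(PyCodeObject *code, PyObject *globals)
{
    PyThreadState *tstate = PyThreadState_GET();
    PyFrameObject *f = PyFrame_New(tstate, code, globals, NULL);
    if (f == NULL)
        return NULL;
    PyObject *result = PyEval_EvalFrame(f);
    Py_DECREF(f);
    return result;
}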

PyFrameObject *
PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals,
            PyObject *locals)
{
    LOG("> PyFrame_New\n");

    PyFrameObject *back = tstate->frame;
    PyFrameObject *f;
    PyObject *builtins;
    int extras, ncells, nfrees;

    ncells = PyTuple_GET_SIZE(code->co_cellvars);
    nfrees = PyTuple_GET_SIZE(code->co_freevars);
    extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;

    if (back == NULL || back->f_globals != globals) {
        builtins = PyDict_GetItem(globals, builtin_object);
        if (builtins) {
            if (PyModule_Check(builtins)) {
                builtins = PyModule_GetDict(builtins);
            }
            else if (!PyDict_Check(builtins))
                builtins = NULL;
        }
        if (builtins == NULL) {
            /* No builtins!  Make up a minimal one.
               Give them 'None', at least. */
            builtins = PyDict_New();
            if (builtins == NULL ||
                PyDict_SetItemString(builtins, "None", Py_None) < 0)
                return NULL;
        }
        else
            Py_INCREF(builtins);
    }
    else {
        /* If we share the globals, we share the builtins.
           Save a lookup and a call. */
        builtins = back->f_builtins;
        Py_INCREF(builtins);
    }

    if (free_list == NULL) {
        f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
        if (f == NULL)
            return NULL;
    }
    else {
        /* NOT IMPLEMENTED */
    }

    f->f_builtins = builtins;
    Py_XINCREF(back);
    f->f_back = back;
    Py_INCREF(code);
    f->f_code = code;
    Py_INCREF(globals);
    f->f_globals = globals;
    /* Most functions have CO_NEWLOCALS and CO_OPTIMIZED set. */
    if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) ==
        (CO_NEWLOCALS | CO_OPTIMIZED))
        locals = NULL;          /* PyFrame_Fast2Locals() will set. */
    else if (code->co_flags & CO_NEWLOCALS) {
        locals = PyDict_New();
        if (locals == NULL) {
            Py_DECREF(f);
            return NULL;
        }
    }
    else {
        if (locals == NULL)
            locals = globals;
        Py_INCREF(locals);
    }
    f->f_locals = locals;
    f->f_trace = NULL;
    f->f_exc_type = f->f_exc_value = f->f_exc_traceback = NULL;
    f->f_tstate = tstate;

    f->f_lasti = -1;
    f->f_lineno = code->co_firstlineno;
    f->f_restricted = (builtins != tstate->interp->builtins);
    f->f_iblock = 0;
    f->f_nlocals = code->co_nlocals;
    f->f_stacksize = code->co_stacksize;
    f->f_ncells = ncells;
    f->f_nfreevars = nfrees;

    extras = f->f_nlocals + ncells + nfrees;
    memset(f->f_localsplus, 0, extras * sizeof(f->f_localsplus[0]));

    f->f_valuestack = f->f_localsplus + extras;
    f->f_stacktop = f->f_valuestack;
    _PyObject_GC_TRACK(f);

    LOG("< PyFrame_New\n");
    return f;
}

PyObject *
PyTuple_New(register Py_ssize_t size)
{
    register PyTupleObject *op;
    Py_ssize_t i;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0 && free_list[0]) {
        //jputs("EmptyTuple!");
        //jputsLong(free_list[0]);
        op = free_list[0];
        Py_INCREF(op);
#ifdef COUNT_ALLOCS
        tuple_zero_allocs++;
#endif
        return (PyObject *) op;
    }
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
#ifdef COUNT_ALLOCS
        fast_tuple_allocs++;
#endif
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _PyObject_GC_InitJy(op, &(builtinTypes[TME_INDEX_Tuple]));
        _Py_NewReference((PyObject *)op);
        //jputs(__FUNCTION__);
        //jputsLong(__LINE__);
        //jputsLong(size);
        //jputsLong((jlong) op);
        //jputsLong(Py_TYPE(op));
        //jputsLong(&PyTuple_Type);
        JyNIDebug(JY_NATIVE_ALLOC_GC | JY_INLINE_MASK, op, AS_JY_WITH_GC(op),
                  size, PyTuple_Type.tp_name);
    }
    else
#endif
    {
        Py_ssize_t nbytes = size * sizeof(PyObject *);
        /* Check for overflow */
        if (nbytes / sizeof(PyObject *) != (size_t)size ||
            (nbytes > PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
                      sizeof(PyObject *)))
        {
            return PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    for (i = 0; i < size; i++)
        op->ob_item[i] = NULL;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        free_list[0] = op;
        ++numfree[0];
        JyNIDebug(JY_NATIVE_INCREF | JY_IMMORTAL_MASK | JY_PRE_MASK,
                  op, AS_JY_WITH_GC(op), size, PyTuple_Type.tp_name);
        Py_INCREF(op);          /* extra INCREF so that this is never freed */
    }
#endif
#ifdef SHOW_TRACK_COUNT
    count_tracked++;
#endif
    //_JyNI_GC_TRACK_NoExplore(op);
    _JyNI_GC_TRACK(op);
    return (PyObject *) op;
}