Branch data Line data Source code
#ifndef Py_INTERNAL_GC_H
#define Py_INTERNAL_GC_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
# error "this header requires Py_BUILD_CORE define"
#endif

/* GC information is stored BEFORE the object structure: a GC-managed
   object is laid out as [PyGC_Head][PyObject ...], so the header is
   reachable by pointer arithmetic (see _Py_AS_GC below). */
typedef struct {
    // Pointer to next object in the list.
    // 0 means the object is not tracked
    uintptr_t _gc_next;

    // Pointer to previous object in the list.
    // Lowest two bits are used for flags documented later.
    uintptr_t _gc_prev;
} PyGC_Head;

/* Map an object to its GC header, which sits immediately before the
   PyObject itself — hence the "- 1" pointer arithmetic. */
static inline PyGC_Head* _Py_AS_GC(PyObject *op) {
    return (_Py_CAST(PyGC_Head*, op) - 1);
}
#define _PyGC_Head_UNUSED PyGC_Head

/* True if the object is currently tracked by the GC: an object is
   tracked if and only if its _gc_next link is non-zero. */
static inline int _PyObject_GC_IS_TRACKED(PyObject *op) {
    PyGC_Head *gc = _Py_AS_GC(op);
    return (gc->_gc_next != 0);
}
// Self-referential macro wrapper: lets callers pass any pointer type by
// casting the argument to PyObject* before calling the inline function.
#define _PyObject_GC_IS_TRACKED(op) _PyObject_GC_IS_TRACKED(_Py_CAST(PyObject*, op))

/* True if the object may be tracked by the GC in the future, or already is.
   This can be useful to implement some optimizations.

   Non-GC objects are never tracked; exact tuples report their current
   tracking status because they can be untracked during collection
   (see the NOTE on untracking below); everything else may be tracked. */
static inline int _PyObject_GC_MAY_BE_TRACKED(PyObject *obj) {
    if (!PyObject_IS_GC(obj)) {
        // Type does not support GC at all: can never be tracked.
        return 0;
    }
    if (PyTuple_CheckExact(obj)) {
        // Tuples may have been permanently untracked by a previous
        // collection pass; report their current status.
        return _PyObject_GC_IS_TRACKED(obj);
    }
    return 1;
}


/* Bit flags for _gc_prev */
/* Bit 0 is set when tp_finalize is called */
#define _PyGC_PREV_MASK_FINALIZED (1)
/* Bit 1 is set when the object is in generation which is GCed currently. */
#define _PyGC_PREV_MASK_COLLECTING (2)
/* The (N-2) most significant bits contain the real address. */
#define _PyGC_PREV_SHIFT (2)
#define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT)

// Lowest bit of _gc_next is used for flags only in GC.
// But it is always 0 for normal code.
static inline PyGC_Head* _PyGCHead_NEXT(PyGC_Head *gc) {
    uintptr_t next = gc->_gc_next;
    return _Py_CAST(PyGC_Head*, next);
}
static inline void _PyGCHead_SET_NEXT(PyGC_Head *gc, PyGC_Head *next) {
    // No masking needed: outside of a collection the low bit is 0.
    gc->_gc_next = _Py_CAST(uintptr_t, next);
}

// Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags.
static inline PyGC_Head* _PyGCHead_PREV(PyGC_Head *gc) {
    // Mask off the two flag bits to recover the real pointer value.
    uintptr_t prev = (gc->_gc_prev & _PyGC_PREV_MASK);
    return _Py_CAST(PyGC_Head*, prev);
}
static inline void _PyGCHead_SET_PREV(PyGC_Head *gc, PyGC_Head *prev) {
    uintptr_t uprev = _Py_CAST(uintptr_t, prev);
    // A valid PyGC_Head pointer must have its low two (flag) bits clear.
    assert((uprev & ~_PyGC_PREV_MASK) == 0);
    // Replace the pointer bits while preserving the flag bits.
    gc->_gc_prev = ((gc->_gc_prev & ~_PyGC_PREV_MASK) | uprev);
}

/* Test/set the "finalized" flag (bit 0 of _gc_prev) on a GC header:
   records that tp_finalize has been called for the object. */
static inline int _PyGCHead_FINALIZED(PyGC_Head *gc) {
    return ((gc->_gc_prev & _PyGC_PREV_MASK_FINALIZED) != 0);
}
static inline void _PyGCHead_SET_FINALIZED(PyGC_Head *gc) {
    gc->_gc_prev |= _PyGC_PREV_MASK_FINALIZED;
}

/* Convenience wrappers: same as the two functions above, but taking the
   PyObject* directly instead of its GC header. */
static inline int _PyGC_FINALIZED(PyObject *op) {
    PyGC_Head *gc = _Py_AS_GC(op);
    return _PyGCHead_FINALIZED(gc);
}
static inline void _PyGC_SET_FINALIZED(PyObject *op) {
    PyGC_Head *gc = _Py_AS_GC(op);
    _PyGCHead_SET_FINALIZED(gc);
}


/* GC runtime state */

/* If we change this, we need to change the default value in the
   signature of gc.collect. */
#define NUM_GENERATIONS 3
/*
   NOTE: about untracking of mutable objects.

   Certain types of container cannot participate in a reference cycle, and
   so do not need to be tracked by the garbage collector. Untracking these
   objects reduces the cost of garbage collections. However, determining
   which objects may be untracked is not free, and the costs must be
   weighed against the benefits for garbage collection.

   There are two possible strategies for when to untrack a container:

   i) When the container is created.
   ii) When the container is examined by the garbage collector.

   Tuples containing only immutable objects (integers, strings etc, and
   recursively, tuples of immutable objects) do not need to be tracked.
   The interpreter creates a large number of tuples, many of which will
   not survive until garbage collection. It is therefore not worthwhile
   to untrack eligible tuples at creation time.

   Instead, all tuples except the empty tuple are tracked when created.
   During garbage collection it is determined whether any surviving tuples
   can be untracked. A tuple can be untracked if all of its contents are
   already not tracked. Tuples are examined for untracking in all garbage
   collection cycles. It may take more than one cycle to untrack a tuple.

   Dictionaries containing only immutable objects also do not need to be
   tracked. Dictionaries are untracked when created. If a tracked item is
   inserted into a dictionary (either as a key or value), the dictionary
   becomes tracked. During a full garbage collection (all generations),
   the collector will untrack any dictionaries whose contents are not
   tracked.

   The module provides the python function is_tracked(obj), which returns
   the CURRENT tracking status of the object. Subsequent garbage
   collections may change the tracking status of the object.

   Untracking of certain containers was introduced in issue #4688, and
   the algorithm was refined in response to issue #14775.
*/

/* One GC generation: the doubly-linked list of its objects plus the
   counters that decide when it is collected. */
struct gc_generation {
    PyGC_Head head;
    int threshold; /* collection threshold */
    int count; /* count of allocations or collections of younger
                  generations */
};

/* Running stats per generation */
struct gc_generation_stats {
    /* total number of collections */
    Py_ssize_t collections;
    /* total number of collected objects */
    Py_ssize_t collected;
    /* total number of uncollectable objects (put into gc.garbage) */
    Py_ssize_t uncollectable;
};

/* Per-interpreter GC state: trashcan bookkeeping, configuration flags,
   the generation lists, and collection statistics. */
struct _gc_runtime_state {
    /* List of objects that still need to be cleaned up, singly linked
     * via their gc headers' gc_prev pointers. */
    PyObject *trash_delete_later;
    /* Current call-stack depth of tp_dealloc calls. */
    int trash_delete_nesting;

    /* Is automatic collection enabled? */
    int enabled;
    int debug;
    /* linked lists of container objects */
    struct gc_generation generations[NUM_GENERATIONS];
    PyGC_Head *generation0;
    /* a permanent generation which won't be collected */
    struct gc_generation permanent_generation;
    struct gc_generation_stats generation_stats[NUM_GENERATIONS];
    /* true if we are currently running the collector */
    int collecting;
    /* list of uncollectable objects */
    PyObject *garbage;
    /* a list of callbacks to be invoked when collection is performed */
    PyObject *callbacks;
    /* This is the number of objects that survived the last full
       collection. It approximates the number of long lived objects
       tracked by the GC.

       (by "full collection", we mean a collection of the oldest
       generation). */
    Py_ssize_t long_lived_total;
    /* This is the number of objects that survived all "non-full"
       collections, and are awaiting to undergo a full collection for
       the first time. */
    Py_ssize_t long_lived_pending;
};


/* Initialize a _gc_runtime_state structure (interpreter startup). */
extern void _PyGC_InitState(struct _gc_runtime_state *);

/* "NoFail" collection entry point; presumably must not raise — see the
   implementation in gcmodule.c for the exact contract (not visible
   from this header). */
extern Py_ssize_t _PyGC_CollectNoFail(PyThreadState *tstate);


// Functions to clear types free lists
extern void _PyTuple_ClearFreeList(PyInterpreterState *interp);
extern void _PyFloat_ClearFreeList(PyInterpreterState *interp);
extern void _PyList_ClearFreeList(PyInterpreterState *interp);
extern void _PyDict_ClearFreeList(PyInterpreterState *interp);
extern void _PyAsyncGen_ClearFreeLists(PyInterpreterState *interp);
extern void _PyContext_ClearFreeList(PyInterpreterState *interp);

#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_GC_H */