
Commit 5eae218

markshannon authored and seehwan80 committed
pythonGH-131238: More refactoring of core header files (pythonGH-131351)
Adds new pycore_stats.h header file to help break dependencies involving the pycore_code.h header.
1 parent 9335e1c · commit 5eae218

Note: large commits have some content hidden by default, so only a subset of the 51 changed files is shown below.

51 files changed: +264 -187 lines
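The new Include/internal/pycore_stats.h is itself among the hidden files, but its rough shape can be inferred from the Py_STATS block removed from pycore_code.h below. A minimal sketch, assuming the new header simply takes over those macro definitions unchanged (the guard name and the comments are guesses, not taken from the commit):

/* Sketch of Include/internal/pycore_stats.h (assumed; the real file is hidden
   in this view). It collects the stats counter macros formerly defined in
   pycore_code.h so that headers such as pycore_freelist.h can use
   OBJECT_STAT_INC without pulling in pycore_code.h. */
#ifndef Py_INTERNAL_STATS_H        /* guard name is a guess */
#define Py_INTERNAL_STATS_H

#ifdef Py_STATS
/* Real counters when Py_STATS is defined, e.g.: */
#define STAT_INC(opname, name) \
    do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name++; } while (0)
#define OBJECT_STAT_INC(name) \
    do { if (_Py_stats) _Py_stats->object_stats.name++; } while (0)
/* ... remaining macros moved verbatim from pycore_code.h ... */
#else
/* No-op stubs when stats are disabled. */
#define STAT_INC(opname, name) ((void)0)
#define OBJECT_STAT_INC(name) ((void)0)
/* ... */
#endif  /* Py_STATS */

#endif  /* Py_INTERNAL_STATS_H */

With the macros in their own small header, the hunks below mostly only need to change which header they include.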

Include/internal/pycore_call.h

+1

@@ -9,6 +9,7 @@ extern "C" {
 #endif
 
 #include "pycore_pystate.h" // _PyThreadState_GET()
+#include "pycore_stats.h"
 
 /* Suggested size (number of positional arguments) for arrays of PyObject*
    allocated on a C stack to avoid allocating memory on the heap memory. Such

Include/internal/pycore_code.h

-62

@@ -318,68 +318,6 @@ extern void _Py_Specialize_Send(_PyStackRef receiver, _Py_CODEUNIT *instr);
 extern void _Py_Specialize_ToBool(_PyStackRef value, _Py_CODEUNIT *instr);
 extern void _Py_Specialize_ContainsOp(_PyStackRef value, _Py_CODEUNIT *instr);
 
-#ifdef Py_STATS
-
-#include "pycore_bitutils.h" // _Py_bit_length
-
-#define STAT_INC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name++; } while (0)
-#define STAT_DEC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name--; } while (0)
-#define OPCODE_EXE_INC(opname) do { if (_Py_stats) _Py_stats->opcode_stats[opname].execution_count++; } while (0)
-#define CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.name++; } while (0)
-#define OBJECT_STAT_INC(name) do { if (_Py_stats) _Py_stats->object_stats.name++; } while (0)
-#define OBJECT_STAT_INC_COND(name, cond) \
-    do { if (_Py_stats && cond) _Py_stats->object_stats.name++; } while (0)
-#define EVAL_CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.eval_calls[name]++; } while (0)
-#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \
-    do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0)
-#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
-#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
-#define OPT_STAT_ADD(name, n) do { if (_Py_stats) _Py_stats->optimization_stats.name += (n); } while (0)
-#define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0)
-#define UOP_PAIR_INC(uopcode, lastuop) \
-    do { \
-        if (lastuop && _Py_stats) { \
-            _Py_stats->optimization_stats.opcode[lastuop].pair_count[uopcode]++; \
-        } \
-        lastuop = uopcode; \
-    } while (0)
-#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0)
-#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0)
-#define OPT_HIST(length, name) \
-    do { \
-        if (_Py_stats) { \
-            int bucket = _Py_bit_length(length >= 1 ? length - 1 : 0); \
-            bucket = (bucket >= _Py_UOP_HIST_SIZE) ? _Py_UOP_HIST_SIZE - 1 : bucket; \
-            _Py_stats->optimization_stats.name[bucket]++; \
-        } \
-    } while (0)
-#define RARE_EVENT_STAT_INC(name) do { if (_Py_stats) _Py_stats->rare_event_stats.name++; } while (0)
-#define OPCODE_DEFERRED_INC(opname) do { if (_Py_stats && opcode == opname) _Py_stats->opcode_stats[opname].specialization.deferred++; } while (0)
-
-// Export for '_opcode' shared extension
-PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);
-
-#else
-#define STAT_INC(opname, name) ((void)0)
-#define STAT_DEC(opname, name) ((void)0)
-#define OPCODE_EXE_INC(opname) ((void)0)
-#define CALL_STAT_INC(name) ((void)0)
-#define OBJECT_STAT_INC(name) ((void)0)
-#define OBJECT_STAT_INC_COND(name, cond) ((void)0)
-#define EVAL_CALL_STAT_INC(name) ((void)0)
-#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0)
-#define GC_STAT_ADD(gen, name, n) ((void)0)
-#define OPT_STAT_INC(name) ((void)0)
-#define OPT_STAT_ADD(name, n) ((void)0)
-#define UOP_STAT_INC(opname, name) ((void)0)
-#define UOP_PAIR_INC(uopcode, lastuop) ((void)0)
-#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
-#define OPT_ERROR_IN_OPCODE(opname) ((void)0)
-#define OPT_HIST(length, name) ((void)0)
-#define RARE_EVENT_STAT_INC(name) ((void)0)
-#define OPCODE_DEFERRED_INC(opname) ((void)0)
-#endif // !Py_STATS
-
 // Utility functions for reading/writing 32/64-bit values in the inline caches.
 // Great care should be taken to ensure that these functions remain correct and
 // performant! They should compile to just "move" instructions on all supported

Include/internal/pycore_dict.h

+1

@@ -11,6 +11,7 @@ extern "C" {
 #include "pycore_object.h" // PyManagedDictPointer
 #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_LOAD_SSIZE_ACQUIRE
 #include "pycore_stackref.h" // _PyStackRef
+#include "pycore_stats.h"
 
 // Unsafe flavor of PyDict_GetItemWithError(): no error checking
 extern PyObject* _PyDict_GetItemWithError(PyObject *dp, PyObject *key);

Include/internal/pycore_frame.h

+1

@@ -12,6 +12,7 @@ extern "C" {
 #include <stddef.h> // offsetof()
 #include "pycore_code.h" // STATS
 #include "pycore_stackref.h" // _PyStackRef
+#include "pycore_stats.h"
 
 /* See InternalDocs/frames.md for an explanation of the frame stack
  * including explanation of the PyFrameObject and _PyInterpreterFrame

Include/internal/pycore_freelist.h

+1 -1

@@ -11,7 +11,7 @@ extern "C" {
 #include "pycore_freelist_state.h" // struct _Py_freelists
 #include "pycore_object.h" // _PyObject_IS_GC
 #include "pycore_pystate.h" // _PyThreadState_GET
-#include "pycore_code.h" // OBJECT_STAT_INC
+#include "pycore_stats.h" // OBJECT_STAT_INC
 
 static inline struct _Py_freelists *
 _Py_freelists_GET(void)

Include/internal/pycore_gc.h

+81

@@ -9,6 +9,7 @@ extern "C" {
 #endif
 
 #include "pycore_runtime_structs.h"
+#include "pycore_pystate.h"
 
 
 /* Get an object's GC head */

@@ -203,6 +204,86 @@ static inline void _PyGC_CLEAR_FINALIZED(PyObject *op) {
 #endif
 }
 
+
+/* Tell the GC to track this object.
+ *
+ * The object must not be tracked by the GC.
+ *
+ * NB: While the object is tracked by the collector, it must be safe to call the
+ * ob_traverse method.
+ *
+ * Internal note: interp->gc.generation0->_gc_prev doesn't have any bit flags
+ * because it's not object header. So we don't use _PyGCHead_PREV() and
+ * _PyGCHead_SET_PREV() for it to avoid unnecessary bitwise operations.
+ *
+ * See also the public PyObject_GC_Track() function.
+ */
+static inline void _PyObject_GC_TRACK(
+// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
+#ifndef NDEBUG
+    const char *filename, int lineno,
+#endif
+    PyObject *op)
+{
+    _PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op),
+                          "object already tracked by the garbage collector",
+                          filename, lineno, __func__);
+#ifdef Py_GIL_DISABLED
+    _PyObject_SET_GC_BITS(op, _PyGC_BITS_TRACKED);
+#else
+    PyGC_Head *gc = _Py_AS_GC(op);
+    _PyObject_ASSERT_FROM(op,
+                          (gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0,
+                          "object is in generation which is garbage collected",
+                          filename, lineno, __func__);
+
+    PyInterpreterState *interp = _PyInterpreterState_GET();
+    PyGC_Head *generation0 = &interp->gc.young.head;
+    PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev);
+    _PyGCHead_SET_NEXT(last, gc);
+    _PyGCHead_SET_PREV(gc, last);
+    uintptr_t not_visited = 1 ^ interp->gc.visited_space;
+    gc->_gc_next = ((uintptr_t)generation0) | not_visited;
+    generation0->_gc_prev = (uintptr_t)gc;
+#endif
+}
+
+/* Tell the GC to stop tracking this object.
+ *
+ * Internal note: This may be called while GC. So _PyGC_PREV_MASK_COLLECTING
+ * must be cleared. But _PyGC_PREV_MASK_FINALIZED bit is kept.
+ *
+ * The object must be tracked by the GC.
+ *
+ * See also the public PyObject_GC_UnTrack() which accept an object which is
+ * not tracked.
+ */
+static inline void _PyObject_GC_UNTRACK(
+// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
+#ifndef NDEBUG
+    const char *filename, int lineno,
+#endif
+    PyObject *op)
+{
+    _PyObject_ASSERT_FROM(op, _PyObject_GC_IS_TRACKED(op),
+                          "object not tracked by the garbage collector",
+                          filename, lineno, __func__);
+
+#ifdef Py_GIL_DISABLED
+    _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_TRACKED);
+#else
+    PyGC_Head *gc = _Py_AS_GC(op);
+    PyGC_Head *prev = _PyGCHead_PREV(gc);
+    PyGC_Head *next = _PyGCHead_NEXT(gc);
+    _PyGCHead_SET_NEXT(prev, next);
+    _PyGCHead_SET_PREV(next, prev);
+    gc->_gc_next = 0;
+    gc->_gc_prev &= _PyGC_PREV_MASK_FINALIZED;
+#endif
+}
+
+
 /*
 NOTE: about untracking of mutable objects.

Include/internal/pycore_interp.h

-20

@@ -100,9 +100,6 @@ extern void _PyInterpreterState_SetWhence(
     PyInterpreterState *interp,
     long whence);
 
-extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp);
-
-
 /*
 Runtime Feature Flags

@@ -137,23 +134,6 @@ PyAPI_FUNC(PyStatus) _PyInterpreterState_New(
     PyThreadState *tstate,
     PyInterpreterState **pinterp);
 
-
-#define RARE_EVENT_INTERP_INC(interp, name) \
-    do { \
-        /* saturating add */ \
-        int val = FT_ATOMIC_LOAD_UINT8_RELAXED(interp->rare_events.name); \
-        if (val < UINT8_MAX) { \
-            FT_ATOMIC_STORE_UINT8(interp->rare_events.name, val + 1); \
-        } \
-        RARE_EVENT_STAT_INC(name); \
-    } while (0); \
-
-#define RARE_EVENT_INC(name) \
-    do { \
-        PyInterpreterState *interp = PyInterpreterState_Get(); \
-        RARE_EVENT_INTERP_INC(interp, name); \
-    } while (0); \
-
 #ifdef __cplusplus
 }
 #endif

Include/internal/pycore_object.h

+2 -95

@@ -9,12 +9,11 @@ extern "C" {
 #endif
 
 #include <stdbool.h>
-#include "pycore_gc.h" // _PyObject_GC_IS_TRACKED()
 #include "pycore_emscripten_trampoline.h" // _PyCFunction_TrampolineCall()
-#include "pycore_interp.h" // PyInterpreterState.gc
+#include "pycore_object_deferred.h" // _PyObject_HasDeferredRefcount
 #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_PTR_RELAXED
 #include "pycore_pystate.h" // _PyInterpreterState_GET()
-#include "pycore_stackref.h"
+#include "pycore_typeobject.h" // _PyStaticType_GetState()
 #include "pycore_uniqueid.h" // _PyObject_ThreadIncrefSlow()
 
 // This value is added to `ob_ref_shared` for objects that use deferred

@@ -455,84 +454,6 @@ _PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size)
     Py_SET_SIZE(op, size);
 }
 
-
-/* Tell the GC to track this object.
- *
- * The object must not be tracked by the GC.
- *
- * NB: While the object is tracked by the collector, it must be safe to call the
- * ob_traverse method.
- *
- * Internal note: interp->gc.generation0->_gc_prev doesn't have any bit flags
- * because it's not object header. So we don't use _PyGCHead_PREV() and
- * _PyGCHead_SET_PREV() for it to avoid unnecessary bitwise operations.
- *
- * See also the public PyObject_GC_Track() function.
- */
-static inline void _PyObject_GC_TRACK(
-// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
-#ifndef NDEBUG
-    const char *filename, int lineno,
-#endif
-    PyObject *op)
-{
-    _PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op),
-                          "object already tracked by the garbage collector",
-                          filename, lineno, __func__);
-#ifdef Py_GIL_DISABLED
-    _PyObject_SET_GC_BITS(op, _PyGC_BITS_TRACKED);
-#else
-    PyGC_Head *gc = _Py_AS_GC(op);
-    _PyObject_ASSERT_FROM(op,
-                          (gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0,
-                          "object is in generation which is garbage collected",
-                          filename, lineno, __func__);
-
-    PyInterpreterState *interp = _PyInterpreterState_GET();
-    PyGC_Head *generation0 = &interp->gc.young.head;
-    PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev);
-    _PyGCHead_SET_NEXT(last, gc);
-    _PyGCHead_SET_PREV(gc, last);
-    uintptr_t not_visited = 1 ^ interp->gc.visited_space;
-    gc->_gc_next = ((uintptr_t)generation0) | not_visited;
-    generation0->_gc_prev = (uintptr_t)gc;
-#endif
-}
-
-/* Tell the GC to stop tracking this object.
- *
- * Internal note: This may be called while GC. So _PyGC_PREV_MASK_COLLECTING
- * must be cleared. But _PyGC_PREV_MASK_FINALIZED bit is kept.
- *
- * The object must be tracked by the GC.
- *
- * See also the public PyObject_GC_UnTrack() which accept an object which is
- * not tracked.
- */
-static inline void _PyObject_GC_UNTRACK(
-// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
-#ifndef NDEBUG
-    const char *filename, int lineno,
-#endif
-    PyObject *op)
-{
-    _PyObject_ASSERT_FROM(op, _PyObject_GC_IS_TRACKED(op),
-                          "object not tracked by the garbage collector",
-                          filename, lineno, __func__);
-
-#ifdef Py_GIL_DISABLED
-    _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_TRACKED);
-#else
-    PyGC_Head *gc = _Py_AS_GC(op);
-    PyGC_Head *prev = _PyGCHead_PREV(gc);
-    PyGC_Head *next = _PyGCHead_NEXT(gc);
-    _PyGCHead_SET_NEXT(prev, next);
-    _PyGCHead_SET_PREV(next, prev);
-    gc->_gc_next = 0;
-    gc->_gc_prev &= _PyGC_PREV_MASK_FINALIZED;
-#endif
-}
-
 // Macros to accept any type for the parameter, and to automatically pass
 // the filename and the filename (if NDEBUG is not defined) where the macro
 // is called.

@@ -618,20 +539,6 @@ _Py_TryIncrefCompare(PyObject **src, PyObject *op)
     return 1;
 }
 
-static inline int
-_Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out)
-{
-    if (_PyObject_HasDeferredRefcount(op)) {
-        *out = (_PyStackRef){ .bits = (intptr_t)op | Py_TAG_DEFERRED };
-        return 1;
-    }
-    if (_Py_TryIncrefCompare(src, op)) {
-        *out = PyStackRef_FromPyObjectSteal(op);
-        return 1;
-    }
-    return 0;
-}
-
 /* Loads and increfs an object from ptr, which may contain a NULL value.
    Safe with concurrent (atomic) updates to ptr.
    NOTE: The writer must set maybe-weakref on the stored object! */

Include/internal/pycore_pystate.h

+2 -1

@@ -11,7 +11,8 @@ extern "C" {
 #include "pycore_runtime_structs.h" // _PyRuntime
 #include "pycore_runtime.h" // _PyRuntimeState_GetFinalizing
 #include "pycore_tstate.h" // _PyThreadStateImpl
-#include "pycore_interp.h" // _PyInterpreterState_GetConfig
+
+extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp);
 
 // Values for PyThreadState.state. A thread must be in the "attached" state
 // before calling most Python APIs. If the GIL is enabled, then "attached"
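Design note on the hunk above: pycore_pystate.h previously included pycore_interp.h only to get the _PyInterpreterState_GetConfig() prototype; the commit declares that one function directly and drops the now-duplicate extern from pycore_interp.h (see that file's first hunk above), so the include can be removed. A minimal illustration of the pattern, with hypothetical header and function names:

/* Hypothetical sketch, not from the commit: break an include dependency by
   declaring only what is needed instead of including the other header. */

/* before, in a_state.h: */
#include "b_interp.h"   /* pulled in just for one prototype */

/* after, in a_state.h: */
struct ExampleInterp;   /* the type only needs to be named, not defined */
extern int Example_GetConfig(struct ExampleInterp *interp);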

Include/internal/pycore_stackref.h

+19

@@ -14,6 +14,7 @@ extern "C" {
 #endif
 
 #include "pycore_object_deferred.h"
+#include "pycore_object.h"
 
 #include <stddef.h>
 #include <stdbool.h>

@@ -639,6 +640,24 @@ PyStackRef_FunctionCheck(_PyStackRef stackref)
     return PyFunction_Check(PyStackRef_AsPyObjectBorrow(stackref));
 }
 
+#ifdef Py_GIL_DISABLED
+
+static inline int
+_Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out)
+{
+    if (_PyObject_HasDeferredRefcount(op)) {
+        *out = (_PyStackRef){ .bits = (intptr_t)op | Py_TAG_DEFERRED };
+        return 1;
+    }
+    if (_Py_TryIncrefCompare(src, op)) {
+        *out = PyStackRef_FromPyObjectSteal(op);
+        return 1;
+    }
+    return 0;
+}
+
+#endif
+
 #ifdef __cplusplus
 }
 #endif
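_Py_TryIncrefCompareStackRef() moves here from pycore_object.h and is now compiled only under Py_GIL_DISABLED, i.e. in the free-threaded build: it either reuses a deferred reference or falls back to _Py_TryIncrefCompare(), which increfs only if the slot still holds the same object. A hypothetical caller sketch (not part of the commit; the surrounding names are made up):

#ifdef Py_GIL_DISABLED
/* Hypothetical caller: read a shared PyObject* slot into a _PyStackRef
   without holding a lock. */
static int
load_item_ref(PyObject **slot, _PyStackRef *out)
{
    PyObject *op = _Py_atomic_load_ptr(slot);  /* assumes the pyatomic helpers */
    if (op == NULL) {
        return 0;
    }
    /* Succeeds via deferred refcounting, or via an atomic incref that
       re-checks *slot still points to op; fails if the slot changed. */
    return _Py_TryIncrefCompareStackRef(slot, op, out);
}
#endif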
