#ifndef Py_INTERNAL_OPTIMIZER_H
#define Py_INTERNAL_OPTIMIZER_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#include "pycore_typedefs.h"      // _PyInterpreterFrame
#include "pycore_uop.h"           // _PyUOpInstruction
#include "pycore_uop_ids.h"
#include "pycore_stackref.h"      // _PyStackRef
#include "pycore_optimizer_types.h"

#include <stdbool.h>

typedef struct _PyJitUopBuffer {
    _PyUOpInstruction *start;
    _PyUOpInstruction *next;
    _PyUOpInstruction *end;
} _PyJitUopBuffer;

typedef struct _JitOptRefBuffer {
    JitOptRef *used;
    JitOptRef *end;
} _JitOptRefBuffer;

typedef struct _JitOptContext {
    char done;
    char out_of_space;
    bool contradiction;
    // Has the builtins dict been watched?
    bool builtins_watched;
    // The current "executing" frame.
    _Py_UOpsAbstractFrame *frame;
    _Py_UOpsAbstractFrame frames[MAX_ABSTRACT_FRAME_DEPTH];
    int curr_frame_depth;
    // Arena for the symbolic types.
    ty_arena t_arena;
    /* To do -- We could make this more space efficient
     * by using a single array and growing the stack and
     * locals toward each other. */
    _JitOptRefBuffer locals;
    _JitOptRefBuffer stack;
    JitOptRef locals_array[ABSTRACT_INTERP_LOCALS_SIZE];
    JitOptRef stack_array[ABSTRACT_INTERP_STACK_SIZE];
    _PyJitUopBuffer out_buffer;
    _PyBloomFilter *dependencies;
} JitOptContext;
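
/* Added commentary (an inference from the field names above, not part of
 * the original header): frames[] looks like preallocated storage for the
 * abstract interpreter's call stack, with `frame` pointing at the
 * innermost entry and curr_frame_depth giving the nesting level, bounded
 * by MAX_ABSTRACT_FRAME_DEPTH. */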

static inline void
uop_buffer_init(_PyJitUopBuffer *trace, _PyUOpInstruction *start, uint32_t size)
{
    trace->next = trace->start = start;
    trace->end = start + size;
}

static inline _PyUOpInstruction *
uop_buffer_last(_PyJitUopBuffer *trace)
{
    assert(trace->next > trace->start);
    return trace->next - 1;
}

static inline int
uop_buffer_length(_PyJitUopBuffer *trace)
{
    return (int)(trace->next - trace->start);
}

static inline int
uop_buffer_remaining_space(_PyJitUopBuffer *trace)
{
    return (int)(trace->end - trace->next);
}
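
/* Illustrative sketch (not part of the original header), assuming a
 * caller-owned instruction array:
 *
 *     _PyUOpInstruction instrs[UOP_MAX_TRACE_LENGTH];
 *     _PyJitUopBuffer buf;
 *     uop_buffer_init(&buf, instrs, UOP_MAX_TRACE_LENGTH);
 *     // A writer advances buf.next as it emits uops; then:
 *     //   uop_buffer_length(&buf)          -> uops emitted so far
 *     //   uop_buffer_remaining_space(&buf) -> slots left before buf.end
 *     //   uop_buffer_last(&buf)            -> most recently emitted uop
 */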

typedef struct _PyJitTracerInitialState {
    int stack_depth;
    int chain_depth;
    struct _PyExitData *exit;
    PyCodeObject *code;                  // Strong
    PyFunctionObject *func;              // Strong
    struct _PyExecutorObject *executor;  // Strong
    _Py_CODEUNIT *start_instr;
    _Py_CODEUNIT *close_loop_instr;
    _Py_CODEUNIT *jump_backward_instr;
} _PyJitTracerInitialState;

typedef struct _PyJitTracerPreviousState {
    int instr_oparg;
    int instr_stacklevel;
    _Py_CODEUNIT *instr;
    PyCodeObject *instr_code;            // Strong
    struct _PyInterpreterFrame *instr_frame;
    PyObject *recorded_value;            // Strong, may be NULL
} _PyJitTracerPreviousState;

typedef struct _PyJitTracerTranslatorState {
    int jump_backward_seen;
} _PyJitTracerTranslatorState;

typedef struct _PyJitTracerState {
    bool is_tracing;
    _PyJitTracerInitialState initial_state;
    _PyJitTracerPreviousState prev_state;
    _PyJitTracerTranslatorState translator_state;
    JitOptContext opt_context;
    _PyJitUopBuffer code_buffer;
    _PyJitUopBuffer out_buffer;
    _PyUOpInstruction uop_array[2 * UOP_MAX_TRACE_LENGTH];
} _PyJitTracerState;

typedef struct _PyExecutorLinkListNode {
    struct _PyExecutorObject *next;
    struct _PyExecutorObject *previous;
} _PyExecutorLinkListNode;

typedef struct {
    uint8_t opcode;
    uint8_t oparg;
    uint8_t valid;
    uint8_t chain_depth;   // Must be big enough for MAX_CHAIN_DEPTH - 1.
    bool cold;
    uint8_t pending_deletion;
    int32_t index;         // Index of ENTER_EXECUTOR (if code isn't NULL, below).
    _PyBloomFilter bloom;
    _PyExecutorLinkListNode links;
    PyCodeObject *code;    // Weak (NULL if no corresponding ENTER_EXECUTOR).
} _PyVMData;

typedef struct _PyExitData {
    uint32_t target;
    uint16_t index:12;
    uint16_t stack_cache:2;
    uint16_t is_dynamic:1;
    uint16_t is_control_flow:1;
    _Py_BackoffCounter temperature;
    struct _PyExecutorObject *executor;
} _PyExitData;
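
/* Added commentary: the four bit-fields above total exactly 16 bits
 * (12 + 2 + 1 + 1), so they pack into a single uint16_t. The 12-bit
 * index also implies an executor can address at most 4096 exits. */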

typedef struct _PyExecutorObject {
    PyObject_VAR_HEAD
    const _PyUOpInstruction *trace;
    _PyVMData vm_data;  /* Used by the VM, but opaque to the optimizer */
    uint32_t exit_count;
    uint32_t code_size;
    size_t jit_size;
    void *jit_code;
    _PyExitData exits[1];
} _PyExecutorObject;
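
/* Added commentary: exits[1] is a C89-style trailing array. Executors are
 * variable-sized (note PyObject_VAR_HEAD), and the allocation presumably
 * reserves exit_count _PyExitData entries after the fixed fields; this
 * layout is what lets _PyExecutor_FromExit() (below) recover the owning
 * executor from a bare exit pointer. */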

// Export for '_opcode' shared extension (JIT compiler).
PyAPI_FUNC(_PyExecutorObject*) _Py_GetExecutor(PyCodeObject *code, int offset);

void _Py_ExecutorInit(_PyExecutorObject *, const _PyBloomFilter *);
void _Py_ExecutorDetach(_PyExecutorObject *);
void _Py_BloomFilter_Init(_PyBloomFilter *);
void _Py_BloomFilter_Add(_PyBloomFilter *bloom, void *obj);
PyAPI_FUNC(void) _Py_Executor_DependsOn(_PyExecutorObject *executor, void *obj);

#define _Py_MAX_ALLOWED_BUILTINS_MODIFICATIONS 3
#define _Py_MAX_ALLOWED_GLOBALS_MODIFICATIONS 6

#ifdef _Py_TIER2
PyAPI_FUNC(void) _Py_Executors_InvalidateDependency(PyInterpreterState *interp, void *obj, int is_invalidation);
PyAPI_FUNC(void) _Py_Executors_InvalidateAll(PyInterpreterState *interp, int is_invalidation);
PyAPI_FUNC(void) _Py_Executors_InvalidateCold(PyInterpreterState *interp);
#else
# define _Py_Executors_InvalidateDependency(A, B, C) ((void)0)
# define _Py_Executors_InvalidateAll(A, B) ((void)0)
#endif

// Threshold for triggering executor invalidation: cleanup runs once
// executor_creation_counter exceeds this value.
// The value is arbitrary and has not been tuned.
#define JIT_CLEANUP_THRESHOLD 1000

int _Py_uop_analyze_and_optimize(
    _PyThreadStateImpl *tstate,
    _PyUOpInstruction *input, int trace_len, int curr_stackentries,
    _PyUOpInstruction *output, _PyBloomFilter *dependencies);

extern PyTypeObject _PyUOpExecutor_Type;

#define UOP_FORMAT_TARGET 0
#define UOP_FORMAT_JUMP 1

static inline uint32_t uop_get_target(const _PyUOpInstruction *inst)
{
    assert(inst->format == UOP_FORMAT_TARGET);
    return inst->target;
}

static inline uint16_t uop_get_jump_target(const _PyUOpInstruction *inst)
{
    assert(inst->format == UOP_FORMAT_JUMP);
    return inst->jump_target;
}

static inline uint16_t uop_get_error_target(const _PyUOpInstruction *inst)
{
    assert(inst->format != UOP_FORMAT_TARGET);
    return inst->error_target;
}
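
/* Illustrative sketch (not part of the original header): which accessor
 * is valid depends on the instruction's format field, e.g.:
 *
 *     if (inst->format == UOP_FORMAT_JUMP) {
 *         uint16_t dst = uop_get_jump_target(inst);
 *         // ...
 *     }
 *     else {
 *         uint32_t tgt = uop_get_target(inst);
 *         // ...
 *     }
 *
 * Note that uop_get_error_target() asserts only that the format is not
 * UOP_FORMAT_TARGET, so it also accepts jump-format instructions. */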

#define REF_IS_BORROWED 1
#define REF_IS_INVALID 2
#define REF_TAG_BITS 3

#define JIT_BITS_TO_PTR_MASKED(REF) ((JitOptSymbol *)(((REF).bits) & (~REF_TAG_BITS)))

static inline JitOptSymbol *
PyJitRef_Unwrap(JitOptRef ref)
{
    return JIT_BITS_TO_PTR_MASKED(ref);
}

bool _Py_uop_symbol_is_immortal(JitOptSymbol *sym);

static inline JitOptRef
PyJitRef_Wrap(JitOptSymbol *sym)
{
    return (JitOptRef){.bits = (uintptr_t)sym};
}

static inline JitOptRef
PyJitRef_WrapInvalid(void *ptr)
{
    return (JitOptRef){.bits = (uintptr_t)ptr | REF_IS_INVALID};
}

static inline bool
PyJitRef_IsInvalid(JitOptRef ref)
{
    return (ref.bits & REF_IS_INVALID) == REF_IS_INVALID;
}

static inline JitOptRef
PyJitRef_StripReferenceInfo(JitOptRef ref)
{
    return PyJitRef_Wrap(PyJitRef_Unwrap(ref));
}

static inline JitOptRef
PyJitRef_Borrow(JitOptRef ref)
{
    return (JitOptRef){ .bits = ref.bits | REF_IS_BORROWED };
}

static const JitOptRef PyJitRef_NULL = {.bits = REF_IS_BORROWED};

static inline bool
PyJitRef_IsNull(JitOptRef ref)
{
    return ref.bits == PyJitRef_NULL.bits;
}

static inline int
PyJitRef_IsBorrowed(JitOptRef ref)
{
    return (ref.bits & REF_IS_BORROWED) == REF_IS_BORROWED;
}
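
/* Illustrative sketch (not part of the original header): a JitOptRef packs
 * tag flags into the two low bits of a JitOptSymbol pointer, which relies
 * on symbols being at least 4-byte aligned. A typical round trip,
 * assuming `sym` is a valid JitOptSymbol pointer:
 *
 *     JitOptRef owned = PyJitRef_Wrap(sym);         // bits = sym, untagged
 *     JitOptRef borrowed = PyJitRef_Borrow(owned);  // sets REF_IS_BORROWED
 *     assert(PyJitRef_IsBorrowed(borrowed));
 *     assert(PyJitRef_Unwrap(borrowed) == sym);     // tags masked off
 *
 * PyJitRef_NULL is the borrowed encoding of a NULL symbol, which is why
 * PyJitRef_IsNull() compares full bit patterns rather than pointers. */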
extern bool _Py_uop_sym_is_null(JitOptRef sym);
extern bool _Py_uop_sym_is_not_null(JitOptRef sym);
extern bool _Py_uop_sym_is_const(JitOptContext *ctx, JitOptRef sym);
extern PyObject *_Py_uop_sym_get_const(JitOptContext *ctx, JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_unknown(JitOptContext *ctx);
extern JitOptRef _Py_uop_sym_new_not_null(JitOptContext *ctx);
extern JitOptRef _Py_uop_sym_new_type(
    JitOptContext *ctx, PyTypeObject *typ);
extern JitOptRef _Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val);
extern JitOptRef _Py_uop_sym_new_const_steal(JitOptContext *ctx, PyObject *const_val);
bool _Py_uop_sym_is_safe_const(JitOptContext *ctx, JitOptRef sym);
_PyStackRef _Py_uop_sym_get_const_as_stackref(JitOptContext *ctx, JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_null(JitOptContext *ctx);
extern bool _Py_uop_sym_has_type(JitOptRef sym);
extern bool _Py_uop_sym_matches_type(JitOptRef sym, PyTypeObject *typ);
extern bool _Py_uop_sym_matches_type_version(JitOptRef sym, unsigned int version);
extern void _Py_uop_sym_set_null(JitOptContext *ctx, JitOptRef sym);
extern void _Py_uop_sym_set_non_null(JitOptContext *ctx, JitOptRef sym);
extern void _Py_uop_sym_set_type(JitOptContext *ctx, JitOptRef sym, PyTypeObject *typ);
extern bool _Py_uop_sym_set_type_version(JitOptContext *ctx, JitOptRef sym, unsigned int version);
extern void _Py_uop_sym_set_const(JitOptContext *ctx, JitOptRef sym, PyObject *const_val);
extern bool _Py_uop_sym_is_bottom(JitOptRef sym);
extern int _Py_uop_sym_truthiness(JitOptContext *ctx, JitOptRef sym);
extern PyTypeObject *_Py_uop_sym_get_type(JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_tuple(JitOptContext *ctx, int size, JitOptRef *args);
extern JitOptRef _Py_uop_sym_tuple_getitem(JitOptContext *ctx, JitOptRef sym, Py_ssize_t item);
extern Py_ssize_t _Py_uop_sym_tuple_length(JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_truthiness(JitOptContext *ctx, JitOptRef value, bool truthy);
extern bool _Py_uop_sym_is_compact_int(JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_compact_int(JitOptContext *ctx);
extern void _Py_uop_sym_set_compact_int(JitOptContext *ctx, JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_predicate(JitOptContext *ctx, JitOptRef lhs_ref, JitOptRef rhs_ref, JitOptPredicateKind kind);
extern void _Py_uop_sym_apply_predicate_narrowing(JitOptContext *ctx, JitOptRef sym, bool branch_is_true);
extern void _Py_uop_sym_set_recorded_value(JitOptContext *ctx, JitOptRef sym, PyObject *value);
extern void _Py_uop_sym_set_recorded_type(JitOptContext *ctx, JitOptRef sym, PyTypeObject *type);
extern void _Py_uop_sym_set_recorded_gen_func(JitOptContext *ctx, JitOptRef ref, PyFunctionObject *value);
extern PyCodeObject *_Py_uop_sym_get_probable_func_code(JitOptRef sym);
extern PyObject *_Py_uop_sym_get_probable_value(JitOptRef sym);
extern PyTypeObject *_Py_uop_sym_get_probable_type(JitOptRef sym);
extern JitOptRef *_Py_uop_sym_set_stack_depth(JitOptContext *ctx, int stack_depth, JitOptRef *current_sp);
extern void _Py_uop_abstractcontext_init(JitOptContext *ctx, _PyBloomFilter *dependencies);
extern void _Py_uop_abstractcontext_fini(JitOptContext *ctx);
extern _Py_UOpsAbstractFrame *_Py_uop_frame_new(
    JitOptContext *ctx,
    PyCodeObject *co,
    JitOptRef *args,
    int arg_len);
extern _Py_UOpsAbstractFrame *_Py_uop_frame_new_from_symbol(
    JitOptContext *ctx,
    JitOptRef callable,
    JitOptRef *args,
    int arg_len);
extern int _Py_uop_frame_pop(JitOptContext *ctx, PyCodeObject *co);

PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored);

PyAPI_FUNC(int) _PyOptimizer_Optimize(_PyInterpreterFrame *frame, PyThreadState *tstate);
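
/* Recover the owning executor from one of its exits: exit->index is the
 * exit's position in the executor's trailing exits[] array, so subtracting
 * it yields &exits[0], and offsetof() then steps back from the array to
 * the start of the _PyExecutorObject. */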
static inline _PyExecutorObject *_PyExecutor_FromExit(_PyExitData *exit)
{
    _PyExitData *exit0 = exit - exit->index;
    return (_PyExecutorObject *)(((char *)exit0) - offsetof(_PyExecutorObject, exits));
}

extern _PyExecutorObject *_PyExecutor_GetColdExecutor(void);
extern _PyExecutorObject *_PyExecutor_GetColdDynamicExecutor(void);

PyAPI_FUNC(void) _PyExecutor_ClearExit(_PyExitData *exit);
extern void _PyExecutor_Free(_PyExecutorObject *self);
PyAPI_FUNC(int) _PyDumpExecutors(FILE *out);

#ifdef _Py_TIER2
PyAPI_FUNC(void) _Py_ClearExecutorDeletionList(PyInterpreterState *interp);
#endif

PyAPI_FUNC(int) _PyJit_translate_single_bytecode_to_trace(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *next_instr, int stop_tracing_opcode);

PyAPI_FUNC(int)
_PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame,
                            _Py_CODEUNIT *curr_instr, _Py_CODEUNIT *start_instr,
                            _Py_CODEUNIT *close_loop_instr, _PyStackRef *stack_pointer,
                            int chain_depth, _PyExitData *exit,
                            int oparg, _PyExecutorObject *current_executor);

PyAPI_FUNC(void) _PyJit_FinalizeTracing(PyThreadState *tstate, int err);
void _PyPrintExecutor(_PyExecutorObject *executor, const _PyUOpInstruction *marker);
void _PyJit_TracerFree(_PyThreadStateImpl *_tstate);

#ifdef _Py_TIER2
typedef void (*_Py_RecordFuncPtr)(_PyInterpreterFrame *frame, _PyStackRef *stackpointer, int oparg, PyObject **recorded_value);
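
// Added commentary (an inference from the declarations below, not part of
// the original header): _PyOpcode_RecordFunctionIndices appears to map each
// opcode (0..255) to an index into _PyOpcode_RecordFunctions, whose entries
// record runtime values observed while tracing.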
PyAPI_DATA(const _Py_RecordFuncPtr) _PyOpcode_RecordFunctions[];
PyAPI_DATA(const uint8_t) _PyOpcode_RecordFunctionIndices[256];
#endif

#ifdef __cplusplus
}
#endif

#endif /* !Py_INTERNAL_OPTIMIZER_H */