-
-
Notifications
You must be signed in to change notification settings - Fork 34.6k
Expand file tree
/
Copy pathbytecodes.c
More file actions
5293 lines (4743 loc) · 213 KB
/
bytecodes.c
File metadata and controls
5293 lines (4743 loc) · 213 KB
Edit and raw actions
OlderNewer
1
// This file contains instruction definitions.
2
// It is read by generators stored in Tools/cases_generator/
3
// to generate Python/generated_cases.c.h and others.
4
// Note that there is some dummy C code at the top and bottom of the file
5
// to fool text editors like VS Code into believing this is valid C code.
6
// The actual instruction definitions start at // BEGIN BYTECODES //.
7
// See Tools/cases_generator/README.md for more information.
8
9
#include "Python.h"
10
#include "pycore_abstract.h" // _PyIndex_Check()
11
#include "pycore_audit.h" // _PySys_Audit()
12
#include "pycore_backoff.h"
13
#include "pycore_cell.h" // PyCell_GetRef()
14
#include "pycore_ceval.h"
15
#include "pycore_code.h"
16
#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS
17
#include "pycore_function.h"
18
#include "pycore_instruments.h"
19
#include "pycore_intrinsics.h"
20
#include "pycore_long.h" // _PyLong_GetZero()
21
#include "pycore_moduleobject.h" // PyModuleObject
22
#include "pycore_object.h" // _PyObject_GC_TRACK()
23
#include "pycore_opcode_metadata.h" // uop names
24
#include "pycore_opcode_utils.h" // MAKE_FUNCTION_*
25
#include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_*
26
#include "pycore_pyerrors.h" // _PyErr_GetRaisedException()
27
#include "pycore_pystate.h" // _PyInterpreterState_GET()
28
#include "pycore_range.h" // _PyRangeIterObject
29
#include "pycore_long.h" // _PyLong_ExactDealloc()
30
#include "pycore_setobject.h" // _PySet_NextEntry()
31
#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
32
#include "pycore_tuple.h" // _PyTuple_ITEMS()
33
#include "pycore_typeobject.h" // _PySuper_Lookup()
34
35
#include "pycore_dict.h"
36
#include "dictobject.h"
37
#include "pycore_frame.h"
38
#include "opcode.h"
39
#include "optimizer.h"
40
#include "pydtrace.h"
41
#include "setobject.h"
42
43
44
// Use plain switch dispatch in this template file; the generated
// interpreters make their own dispatch choice.
#define USE_COMPUTED_GOTOS 0
#include "ceval_macros.h"

/* Flow control macros */
// No-op here; the cases generator supplies the real control transfer.
#define GO_TO_INSTRUCTION(instname) ((void)0)

// DSL keywords, defined as near-no-op C so that editors and compilers can
// parse this file; Tools/cases_generator/ interprets them for real.
#define inst(name, ...) case name:
#define op(name, ...) /* NAME is ignored */
#define macro(name) static int MACRO_##name
#define super(name) static int SUPER_##name
#define family(name, ...) static int family_##name
#define pseudo(name) static int pseudo_##name
#define label(name) name:

/* Annotations */
// Instruction annotations recognized by the cases generator; they expand
// to nothing in plain C.
#define guard
#define override
#define specializing
#define split
#define replicate(TIMES)
#define tier1
#define no_save_ip
66
67
// Dummy variables for stack effects.  These give the stack-effect names
// used in inst()/op() signatures a C declaration so the file type-checks;
// the generator replaces them with real stack accesses.
static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub;
static PyObject *container, *start, *stop, *v, *lhs, *rhs, *res2;
static PyObject *list, *tuple, *dict, *owner, *set, *str, *tup, *map, *keys;
static PyObject *exit_func, *lasti, *val, *retval, *obj, *iter, *exhausted;
static PyObject *aiter, *awaitable, *iterable, *w, *exc_value, *bc, *locals;
static PyObject *orig, *excs, *update, *b, *fromlist, *level, *from;
static PyObject **pieces, **values;
static size_t jump;
// Dummy variables for cache effects
static uint16_t invert, counter, index, hint;
#define unused 0 // Used in a macro def, can't be static
static uint32_t type_version;
static _PyExecutorObject *current_executor;
81
82
static PyObject *
83
dummy_func(
84
PyThreadState *tstate,
85
_PyInterpreterFrame *frame,
86
unsigned char opcode,
87
unsigned int oparg,
88
_Py_CODEUNIT *next_instr,
89
PyObject **stack_pointer,
90
int throwflag,
91
PyObject *args[]
92
)
93
{
94
// Dummy labels.
95
pop_1_error:
96
// Dummy locals.
97
PyObject *dummy;
98
_Py_CODEUNIT *this_instr;
99
PyObject *attr;
100
PyObject *attrs;
101
PyObject *bottom;
102
PyObject *callable;
103
PyObject *callargs;
104
PyObject *codeobj;
105
PyObject *cond;
106
PyObject *descr;
107
PyObject *exc;
108
PyObject *exit;
109
PyObject *fget;
110
PyObject *fmt_spec;
111
PyObject *func;
112
uint32_t func_version;
113
PyObject *getattribute;
114
PyObject *kwargs;
115
PyObject *kwdefaults;
116
PyObject *len_o;
117
PyObject *match;
118
PyObject *match_type;
119
PyObject *method;
120
PyObject *mgr;
121
Py_ssize_t min_args;
122
PyObject *names;
123
PyObject *new_exc;
124
PyObject *next;
125
PyObject *none;
126
PyObject *null;
127
PyObject *prev_exc;
128
PyObject *receiver;
129
PyObject *rest;
130
int result;
131
PyObject *self;
132
PyObject *seq;
133
PyObject *slice;
134
PyObject *step;
135
PyObject *subject;
136
PyObject *top;
137
PyObject *type;
138
PyObject *typevars;
139
PyObject *val0;
140
PyObject *val1;
141
int values_or_none;
142
143
switch (opcode) {
144
145
// BEGIN BYTECODES //
146
// NOP: no operation.  Marked `pure` (no side effects).
pure inst(NOP, (--)) {
}

// Specialization family for RESUME; RESUME_CHECK is its only member.
family(RESUME, 0) = {
    RESUME_CHECK,
};

// NOT_TAKEN is defined to behave exactly like NOP.
macro(NOT_TAKEN) = NOP;

// Periodic housekeeping: if any event bit is set in the eval breaker,
// handle the pending work (signals, pending calls, etc. — see
// _Py_HandlePending).
op(_CHECK_PERIODIC, (--)) {
    _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
    QSBR_QUIESCENT_STATE(tstate);
    if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
        int err = _Py_HandlePending(tstate);
        ERROR_IF(err != 0, error);
    }
}
163
164
// Same housekeeping as _CHECK_PERIODIC, but skipped when this RESUME was
// reached after a `yield from`/`await` delegation (oparg location >=
// RESUME_AFTER_YIELD_FROM).
// Fix: removed a stray line-continuation backslash after
// QSBR_QUIESCENT_STATE(tstate); it spliced the following line into this
// statement's line for no reason.
op(_CHECK_PERIODIC_IF_NOT_YIELD_FROM, (--)) {
    if ((oparg & RESUME_OPARG_LOCATION_MASK) < RESUME_AFTER_YIELD_FROM) {
        _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY();
        QSBR_QUIESCENT_STATE(tstate);
        if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
            int err = _Py_HandlePending(tstate);
            ERROR_IF(err != 0, error);
        }
    }
}
174
175
// Quicken RESUME to its specialized form RESUME_CHECK (only when not
// tracing, and only if the opcode has not already been rewritten).
op(_QUICKEN_RESUME, (--)) {
    #if ENABLE_SPECIALIZATION_FT
    if (tstate->tracing == 0 && this_instr->op.code == RESUME) {
        FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK);
    }
    #endif  /* ENABLE_SPECIALIZATION_FT */
}

// If this code object's instrumentation version is stale, re-instrument
// it and re-execute the current instruction.
tier1 op(_MAYBE_INSTRUMENT, (--)) {
    if (tstate->tracing == 0) {
        uintptr_t global_version = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & ~_PY_EVAL_EVENTS_MASK;
        uintptr_t code_version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
        if (code_version != global_version) {
            int err = _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp);
            if (err) {
                ERROR_NO_POP();
            }
            // Retry the same instruction under the new instrumentation.
            next_instr = this_instr;
            DISPATCH();
        }
    }
}
197
198
// Free-threaded build only: if this thread's thread-local bytecode (TLBC)
// index differs from the frame's, switch the frame over to this thread's
// copy of the bytecode and restart dispatch at the equivalent offset.
op(_LOAD_BYTECODE, (--)) {
    #ifdef Py_GIL_DISABLED
    if (frame->tlbc_index !=
            ((_PyThreadStateImpl *)tstate)->tlbc_index) {
        _Py_CODEUNIT *bytecode =
            _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame));
        ERROR_IF(bytecode == NULL, error);
        // Preserve the current position within the (equivalent) new copy.
        ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame);
        frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index;
        frame->instr_ptr = bytecode + off;
        // Make sure this_instr gets reset correctly for any uops that
        // follow
        next_instr = frame->instr_ptr;
        DISPATCH();
    }
    #endif
}

// RESUME = load the right bytecode copy, (re-)instrument if stale,
// quicken, then do the periodic eval-breaker check.
macro(RESUME) =
    _LOAD_BYTECODE +
    _MAYBE_INSTRUMENT +
    _QUICKEN_RESUME +
    _CHECK_PERIODIC_IF_NOT_YIELD_FROM;
221
222
// Specialized RESUME: deoptimize back to RESUME if the eval breaker has
// events pending or the instrumentation version is stale (both encoded in
// the version comparison below), or (free-threaded) if the TLBC index
// changed.
inst(RESUME_CHECK, (--)) {
    #if defined(__EMSCRIPTEN__)
    DEOPT_IF(_Py_emscripten_signal_clock == 0);
    _Py_emscripten_signal_clock -= Py_EMSCRIPTEN_SIGNAL_HANDLING;
    #endif
    uintptr_t eval_breaker = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker);
    uintptr_t version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
    assert((version & _PY_EVAL_EVENTS_MASK) == 0);
    DEOPT_IF(eval_breaker != version);
    #ifdef Py_GIL_DISABLED
    DEOPT_IF(frame->tlbc_index !=
             ((_PyThreadStateImpl *)tstate)->tlbc_index);
    #endif
}

// Fire the instrumentation event for resuming a frame; if the callback
// jumped (moved frame->instr_ptr), follow it.
op(_MONITOR_RESUME, (--)) {
    int err = _Py_call_instrumentation(
            tstate, oparg > 0, frame, this_instr);
    ERROR_IF(err, error);
    if (frame->instr_ptr != this_instr) {
        /* Instrumentation has jumped */
        next_instr = frame->instr_ptr;
    }
}

// Instrumented RESUME: like RESUME but without quickening, plus the
// monitoring event.
macro(INSTRUMENTED_RESUME) =
    _LOAD_BYTECODE +
    _MAYBE_INSTRUMENT +
    _CHECK_PERIODIC_IF_NOT_YIELD_FROM +
    _MONITOR_RESUME;
252
253
// LOAD_CLOSURE is a pseudo-instruction compiled as LOAD_FAST.
pseudo(LOAD_CLOSURE, (-- unused)) = {
    LOAD_FAST,
};

// Push local #oparg, raising UnboundLocalError if it is unbound.
inst(LOAD_FAST_CHECK, (-- value)) {
    _PyStackRef value_s = GETLOCAL(oparg);
    if (PyStackRef_IsNull(value_s)) {
        _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError,
            UNBOUNDLOCAL_ERROR_MSG,
            PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)
        );
        ERROR_IF(1, error);
    }
    value = PyStackRef_DUP(value_s);
}

// Push local #oparg; the compiler has proven it is bound.
// Replicated 8 times so small opargs get their own opcodes.
replicate(8) pure inst(LOAD_FAST, (-- value)) {
    assert(!PyStackRef_IsNull(GETLOCAL(oparg)));
    value = PyStackRef_DUP(GETLOCAL(oparg));
}

// Push local #oparg and clear the slot (used e.g. by the compiler where
// the local must be emptied; the reference moves to the stack).
inst(LOAD_FAST_AND_CLEAR, (-- value)) {
    value = GETLOCAL(oparg);
    // do not use SETLOCAL here, it decrefs the old value
    GETLOCAL(oparg) = PyStackRef_NULL;
}

// Fused LOAD_FAST;LOAD_FAST: two local indices packed into one oparg
// (high nibble, then low nibble).
inst(LOAD_FAST_LOAD_FAST, ( -- value1, value2)) {
    uint32_t oparg1 = oparg >> 4;
    uint32_t oparg2 = oparg & 15;
    value1 = PyStackRef_DUP(GETLOCAL(oparg1));
    value2 = PyStackRef_DUP(GETLOCAL(oparg2));
}
286
287
// LOAD_CONST specializes on the (im)mortality of the constant.
family(LOAD_CONST, 0) = {
    LOAD_CONST_MORTAL,
    LOAD_CONST_IMMORTAL,
};

// Push co_consts[oparg]; self-specializes on first execution.
inst(LOAD_CONST, (-- value)) {
    /* We can't do this in the bytecode compiler as
     * marshalling can intern strings and make them immortal. */
    PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
    value = PyStackRef_FromPyObjectNew(obj);
    #if ENABLE_SPECIALIZATION
    if (this_instr->op.code == LOAD_CONST) {
        this_instr->op.code = _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL;
    }
    #endif
}

// LOAD_CONST for constants that need refcounting.
inst(LOAD_CONST_MORTAL, (-- value)) {
    PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
    value = PyStackRef_FromPyObjectNew(obj);
}

// LOAD_CONST for immortal constants: no refcount traffic.
inst(LOAD_CONST_IMMORTAL, (-- value)) {
    PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
    assert(_Py_IsImmortal(obj));
    value = PyStackRef_FromPyObjectImmortal(obj);
}

// Push the cached small int `oparg` (all small ints are immortal).
replicate(4) inst(LOAD_SMALL_INT, (-- value)) {
    assert(oparg < _PY_NSMALLPOSINTS);
    PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
    value = PyStackRef_FromPyObjectImmortal(obj);
}
320
321
// Pop TOS into local #oparg.  Replicated for small opargs.
replicate(8) inst(STORE_FAST, (value --)) {
    SETLOCAL(oparg, value);
    DEAD(value);
}

// Pseudo-instruction compiled as STORE_FAST; the value may be NULL.
pseudo(STORE_FAST_MAYBE_NULL, (unused --)) = {
    STORE_FAST,
};

// Fused STORE_FAST;LOAD_FAST (store index in high nibble, load index in
// low nibble of oparg).
inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) {
    uint32_t oparg1 = oparg >> 4;
    uint32_t oparg2 = oparg & 15;
    SETLOCAL(oparg1, value1);
    DEAD(value1);
    value2 = PyStackRef_DUP(GETLOCAL(oparg2));
}

// Fused STORE_FAST;STORE_FAST: TOS goes to the high-nibble index,
// the value below it to the low-nibble index.
inst(STORE_FAST_STORE_FAST, (value2, value1 --)) {
    uint32_t oparg1 = oparg >> 4;
    uint32_t oparg2 = oparg & 15;
    SETLOCAL(oparg1, value1);
    DEAD(value1);
    SETLOCAL(oparg2, value2);
    DEAD(value2);
}
346
347
// Pop and discard TOS.
pure inst(POP_TOP, (value --)) {
    DECREF_INPUTS();
}

// Push a NULL sentinel (used e.g. before calls).
pure inst(PUSH_NULL, (-- res)) {
    res = PyStackRef_NULL;
}

// Pop the stale iteration value at loop exit.
no_save_ip inst(END_FOR, (value -- )) {
    /* Don't update instr_ptr, so that POP_ITER sees
     * the FOR_ITER as the previous instruction.
     * This has the benign side effect that if value is
     * finalized it will see the location as the FOR_ITER's.
     */
    PyStackRef_CLOSE(value);
}

// Popping the iterator is just POP_TOP.
macro(POP_ITER) = POP_TOP;

// Instrumented END_FOR: additionally reports STOP_ITERATION to the
// monitoring machinery when the receiver is a generator.
no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) {
    /* Need to create a fake StopIteration error here,
     * to conform to PEP 380 */
    if (PyStackRef_GenCheck(receiver)) {
        int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
        if (err) {
            ERROR_NO_POP();
        }
    }
    DECREF_INPUTS();
}

// Instrumented POP_ITER: fires a BRANCH_RIGHT event before closing the
// iterator.
tier1 inst(INSTRUMENTED_POP_ITER, (iter -- )) {
    INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT);
    PyStackRef_CLOSE(iter);
}

// End of a `send` round-trip: drop the receiver, keep the sent value.
pure inst(END_SEND, (receiver, value -- val)) {
    (void)receiver;
    val = value;
    DEAD(value);
    DECREF_INPUTS();
}

// Instrumented END_SEND: also reports STOP_ITERATION for generator or
// coroutine receivers.
tier1 inst(INSTRUMENTED_END_SEND, (receiver, value -- val)) {
    PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver);
    if (PyGen_Check(receiver_o) || PyCoro_CheckExact(receiver_o)) {
        int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
        if (err) {
            ERROR_NO_POP();
        }
    }
    val = value;
    DEAD(value);
    PyStackRef_CLOSE(receiver);
}
402
403
// Unary minus: res = -value, via the generic number protocol.
inst(UNARY_NEGATIVE, (value -- res)) {
    PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value));
    DECREF_INPUTS();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Boolean negation; the compiler guarantees the operand is already a bool
// (a TO_BOOL precedes this), so no error is possible.
pure inst(UNARY_NOT, (value -- res)) {
    assert(PyStackRef_BoolCheck(value));
    res = PyStackRef_IsFalse(value)
        ? PyStackRef_True : PyStackRef_False;
    DEAD(value);
}
416
417
// TO_BOOL specialization family.
family(TO_BOOL, INLINE_CACHE_ENTRIES_TO_BOOL) = {
    TO_BOOL_ALWAYS_TRUE,
    TO_BOOL_BOOL,
    TO_BOOL_INT,
    TO_BOOL_LIST,
    TO_BOOL_NONE,
    TO_BOOL_STR,
};

// Adaptive counter for TO_BOOL: when it triggers, pick a specialization
// based on the operand's type and re-dispatch the same instruction.
specializing op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) {
    #if ENABLE_SPECIALIZATION_FT
    if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
        next_instr = this_instr;
        _Py_Specialize_ToBool(value, next_instr);
        DISPATCH_SAME_OPARG();
    }
    OPCODE_DEFERRED_INC(TO_BOOL);
    ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
    #endif  /* ENABLE_SPECIALIZATION_FT */
}

// Generic truth test via PyObject_IsTrue.
op(_TO_BOOL, (value -- res)) {
    int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value));
    DECREF_INPUTS();
    ERROR_IF(err < 0, error);
    res = err ? PyStackRef_True : PyStackRef_False;
}

macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + unused/2 + _TO_BOOL;

// Operand is already a bool: nothing to do beyond the type check.
inst(TO_BOOL_BOOL, (unused/1, unused/2, value -- value)) {
    EXIT_IF(!PyStackRef_BoolCheck(value));
    STAT_INC(TO_BOOL, hit);
}
451
452
// TO_BOOL for exact ints: zero (an immortal singleton) -> False,
// anything else -> True.
inst(TO_BOOL_INT, (unused/1, unused/2, value -- res)) {
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    EXIT_IF(!PyLong_CheckExact(value_o));
    STAT_INC(TO_BOOL, hit);
    if (_PyLong_IsZero((PyLongObject *)value_o)) {
        assert(_Py_IsImmortal(value_o));
        DEAD(value);
        res = PyStackRef_False;
    }
    else {
        DECREF_INPUTS();
        res = PyStackRef_True;
    }
}

// TO_BOOL for exact lists: truth is non-empty length.
inst(TO_BOOL_LIST, (unused/1, unused/2, value -- res)) {
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    EXIT_IF(!PyList_CheckExact(value_o));
    STAT_INC(TO_BOOL, hit);
    res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False;
    DECREF_INPUTS();
}

// TO_BOOL for None (always False).
inst(TO_BOOL_NONE, (unused/1, unused/2, value -- res)) {
    // This one is a bit weird, because we expect *some* failures:
    EXIT_IF(!PyStackRef_IsNone(value));
    DEAD(value);
    STAT_INC(TO_BOOL, hit);
    res = PyStackRef_False;
}

// TO_BOOL for exact strings: the immortal empty-string singleton is
// False, everything else True.
inst(TO_BOOL_STR, (unused/1, unused/2, value -- res)) {
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    EXIT_IF(!PyUnicode_CheckExact(value_o));
    STAT_INC(TO_BOOL, hit);
    if (value_o == &_Py_STR(empty)) {
        assert(_Py_IsImmortal(value_o));
        DEAD(value);
        res = PyStackRef_False;
    }
    else {
        assert(Py_SIZE(value_o));
        DECREF_INPUTS();
        res = PyStackRef_True;
    }
}

// Drop the operand and push True (used for types known to always be
// truthy, guarded by a type-version check in the macro below).
op(_REPLACE_WITH_TRUE, (value -- res)) {
    DECREF_INPUTS();
    res = PyStackRef_True;
}

macro(TO_BOOL_ALWAYS_TRUE) =
    unused/1 +
    _GUARD_TYPE_VERSION +
    _REPLACE_WITH_TRUE;
508
509
// Bitwise inversion: res = ~value, via the generic number protocol.
inst(UNARY_INVERT, (value -- res)) {
    PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value));
    DECREF_INPUTS();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}
515
516
// BINARY_OP specialization family.
family(BINARY_OP, INLINE_CACHE_ENTRIES_BINARY_OP) = {
    BINARY_OP_MULTIPLY_INT,
    BINARY_OP_ADD_INT,
    BINARY_OP_SUBTRACT_INT,
    BINARY_OP_MULTIPLY_FLOAT,
    BINARY_OP_ADD_FLOAT,
    BINARY_OP_SUBTRACT_FLOAT,
    BINARY_OP_ADD_UNICODE,
    // BINARY_OP_INPLACE_ADD_UNICODE,  // See comments at that opcode.
    BINARY_OP_EXTEND,
};

// Exit to the unspecialized path unless both operands are exact ints.
op(_GUARD_BOTH_INT, (left, right -- left, right)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    EXIT_IF(!PyLong_CheckExact(left_o));
    EXIT_IF(!PyLong_CheckExact(right_o));
}

// Guard: next-on-stack is an exact int.
op(_GUARD_NOS_INT, (left, unused -- left, unused)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    EXIT_IF(!PyLong_CheckExact(left_o));
}

// Guard: top-of-stack is an exact int.
op(_GUARD_TOS_INT, (value -- value)) {
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    EXIT_IF(!PyLong_CheckExact(value_o));
}
544
545
// Specialized int multiply; the guard has already proven both operands
// are exact ints, so the type checks here are assertions.
pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyLong_CheckExact(left_o));
    assert(PyLong_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o);
    PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
    PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Specialized int add; same structure as multiply above.
pure op(_BINARY_OP_ADD_INT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyLong_CheckExact(left_o));
    assert(PyLong_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o);
    PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
    PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Specialized int subtract; same structure as multiply above.
pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyLong_CheckExact(left_o));
    assert(PyLong_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o);
    PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
    PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Guard + skip the remaining cache entries + specialized op.
macro(BINARY_OP_MULTIPLY_INT) =
    _GUARD_BOTH_INT + unused/5 + _BINARY_OP_MULTIPLY_INT;
macro(BINARY_OP_ADD_INT) =
    _GUARD_BOTH_INT + unused/5 + _BINARY_OP_ADD_INT;
macro(BINARY_OP_SUBTRACT_INT) =
    _GUARD_BOTH_INT + unused/5 + _BINARY_OP_SUBTRACT_INT;
596
597
// Exit to the unspecialized path unless both operands are exact floats.
op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    EXIT_IF(!PyFloat_CheckExact(left_o));
    EXIT_IF(!PyFloat_CheckExact(right_o));
}

// Guard: next-on-stack is an exact float.
op(_GUARD_NOS_FLOAT, (left, unused -- left, unused)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    EXIT_IF(!PyFloat_CheckExact(left_o));
}

// Guard: top-of-stack is an exact float.
op(_GUARD_TOS_FLOAT, (value -- value)) {
    PyObject *value_o = PyStackRef_AsPyObjectBorrow(value);
    EXIT_IF(!PyFloat_CheckExact(value_o));
}
613
614
// Specialized float multiply: compute in C doubles, then box via
// _PyFloat_FromDouble_ConsumeInputs, which also consumes both operand
// references.
pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyFloat_CheckExact(left_o));
    assert(PyFloat_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    double dres =
        ((PyFloatObject *)left_o)->ob_fval *
        ((PyFloatObject *)right_o)->ob_fval;
    PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Specialized float add; same structure as multiply above.
pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyFloat_CheckExact(left_o));
    assert(PyFloat_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    double dres =
        ((PyFloatObject *)left_o)->ob_fval +
        ((PyFloatObject *)right_o)->ob_fval;
    PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Specialized float subtract; same structure as multiply above.
pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyFloat_CheckExact(left_o));
    assert(PyFloat_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    double dres =
        ((PyFloatObject *)left_o)->ob_fval -
        ((PyFloatObject *)right_o)->ob_fval;
    PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// Guard + skip the remaining cache entries + specialized op.
macro(BINARY_OP_MULTIPLY_FLOAT) =
    _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_MULTIPLY_FLOAT;
macro(BINARY_OP_ADD_FLOAT) =
    _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_ADD_FLOAT;
macro(BINARY_OP_SUBTRACT_FLOAT) =
    _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_SUBTRACT_FLOAT;
668
669
// Exit to the unspecialized path unless both operands are exact strs.
op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);

    EXIT_IF(!PyUnicode_CheckExact(left_o));
    EXIT_IF(!PyUnicode_CheckExact(right_o));
}

// Specialized str concatenation.
pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyUnicode_CheckExact(left_o));
    assert(PyUnicode_CheckExact(right_o));

    STAT_INC(BINARY_OP, hit);
    PyObject *res_o = PyUnicode_Concat(left_o, right_o);
    PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
    PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
    INPUTS_DEAD();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

macro(BINARY_OP_ADD_UNICODE) =
    _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_ADD_UNICODE;
694
695
// This is a subtle one. It's a super-instruction for
// BINARY_OP_ADD_UNICODE followed by STORE_FAST
// where the store goes into the left argument.
// So the inputs are the same as for all BINARY_OP
// specializations, but there is no output.
// At the end we just skip over the STORE_FAST.
op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(PyUnicode_CheckExact(left_o));
    assert(PyUnicode_CheckExact(right_o));

    // Find the target local of the following STORE_FAST (in tier two the
    // oparg is carried as an operand of the uop instead).
    int next_oparg;
    #if TIER_ONE
    assert(next_instr->op.code == STORE_FAST);
    next_oparg = next_instr->op.arg;
    #else
    next_oparg = CURRENT_OPERAND0();
    #endif
    _PyStackRef *target_local = &GETLOCAL(next_oparg);
    // Only valid when the store target currently holds `left` itself.
    DEOPT_IF(PyStackRef_AsPyObjectBorrow(*target_local) != left_o);
    STAT_INC(BINARY_OP, hit);
    /* Handle `left = left + right` or `left += right` for str.
     *
     * When possible, extend `left` in place rather than
     * allocating a new PyUnicodeObject. This attempts to avoid
     * quadratic behavior when one neglects to use str.join().
     *
     * If `left` has only two references remaining (one from
     * the stack, one in the locals), DECREFing `left` leaves
     * only the locals reference, so PyUnicode_Append knows
     * that the string is safe to mutate.
     */
    assert(Py_REFCNT(left_o) >= 2);
    PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
    DEAD(left);
    PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local);
    PyUnicode_Append(&temp, right_o);
    // PyUnicode_Append sets temp to NULL on failure; the NULL lands in
    // the local slot and is detected by the ERROR_IF below.
    *target_local = PyStackRef_FromPyObjectSteal(temp);
    PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
    DEAD(right);
    ERROR_IF(PyStackRef_IsNull(*target_local), error);
    #if TIER_ONE
    // The STORE_FAST is already done. This is done here in tier one,
    // and during trace projection in tier two:
    assert(next_instr->op.code == STORE_FAST);
    SKIP_OVER(1);
    #endif
}
744
745
// Guard for the extended BINARY_OP specialization: run the descriptor's
// guard function; exit to the generic path if it declines this pair.
op(_GUARD_BINARY_OP_EXTEND, (descr/4, left, right -- left, right)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr;
    assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5);
    assert(d && d->guard);
    int res = d->guard(left_o, right_o);
    EXIT_IF(!res);
}

// Perform the extended specialization's action.
// NOTE(review): res_o is not NULL-checked before PyStackRef_FromPyObjectSteal;
// presumably guarded actions cannot fail — confirm against the descriptor
// contract.
pure op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) {
    PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
    PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
    assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5);
    _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr;

    STAT_INC(BINARY_OP, hit);

    PyObject *res_o = d->action(left_o, right_o);
    DECREF_INPUTS();
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// `rewind/-4` re-reads the same 4 cache entries (descr) for the guard and
// the action.
macro(BINARY_OP_EXTEND) =
    unused/1 + _GUARD_BINARY_OP_EXTEND + rewind/-4 + _BINARY_OP_EXTEND;

macro(BINARY_OP_INPLACE_ADD_UNICODE) =
    _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_INPLACE_ADD_UNICODE;
773
774
// BINARY_SUBSCR specialization family.
family(BINARY_SUBSCR, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = {
    BINARY_SUBSCR_DICT,
    BINARY_SUBSCR_GETITEM,
    BINARY_SUBSCR_LIST_INT,
    BINARY_SUBSCR_STR_INT,
    BINARY_SUBSCR_TUPLE_INT,
};

// Adaptive counter for BINARY_SUBSCR; specializes on container/index
// types when it triggers.
specializing op(_SPECIALIZE_BINARY_SUBSCR, (counter/1, container, sub -- container, sub)) {
    #if ENABLE_SPECIALIZATION_FT
    assert(frame->stackpointer == NULL);
    if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
        next_instr = this_instr;
        _Py_Specialize_BinarySubscr(container, sub, next_instr);
        DISPATCH_SAME_OPARG();
    }
    OPCODE_DEFERRED_INC(BINARY_SUBSCR);
    ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
    #endif  /* ENABLE_SPECIALIZATION_FT */
}

// Generic subscript: res = container[sub] via PyObject_GetItem.
op(_BINARY_SUBSCR, (container, sub -- res)) {
    PyObject *container_o = PyStackRef_AsPyObjectBorrow(container);
    PyObject *sub_o = PyStackRef_AsPyObjectBorrow(sub);

    PyObject *res_o = PyObject_GetItem(container_o, sub_o);
    DECREF_INPUTS();
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

macro(BINARY_SUBSCR) = _SPECIALIZE_BINARY_SUBSCR + _BINARY_SUBSCR;
806
807
// Placeholder specializer: BINARY_SLICE has no specializations yet; just
// record the deferred stat.
specializing op(_SPECIALIZE_BINARY_SLICE, (container, start, stop -- container, start, stop)) {
    // Placeholder until we implement BINARY_SLICE specialization
    #if ENABLE_SPECIALIZATION
    OPCODE_DEFERRED_INC(BINARY_SLICE);
    #endif  /* ENABLE_SPECIALIZATION */
}

// res = container[start:stop].  Builds the slice object (consuming the
// start/stop refs), indexes, then drops the container.
op(_BINARY_SLICE, (container, start, stop -- res)) {
    PyObject *slice = _PyBuildSlice_ConsumeRefs(PyStackRef_AsPyObjectSteal(start),
                                                PyStackRef_AsPyObjectSteal(stop));
    PyObject *res_o;
    // Can't use ERROR_IF() here, because we haven't
    // DECREF'ed container yet, and we still own slice.
    if (slice == NULL) {
        res_o = NULL;
    }
    else {
        res_o = PyObject_GetItem(PyStackRef_AsPyObjectBorrow(container), slice);
        Py_DECREF(slice);
    }
    PyStackRef_CLOSE(container);
    ERROR_IF(res_o == NULL, error);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

macro(BINARY_SLICE) = _SPECIALIZE_BINARY_SLICE + _BINARY_SLICE;

// Placeholder specializer: STORE_SLICE has no specializations yet.
specializing op(_SPECIALIZE_STORE_SLICE, (v, container, start, stop -- v, container, start, stop)) {
    // Placeholder until we implement STORE_SLICE specialization
    #if ENABLE_SPECIALIZATION
    OPCODE_DEFERRED_INC(STORE_SLICE);
    #endif  /* ENABLE_SPECIALIZATION */
}

// container[start:stop] = v.  Same slice-building dance as _BINARY_SLICE.
op(_STORE_SLICE, (v, container, start, stop -- )) {
    PyObject *slice = _PyBuildSlice_ConsumeRefs(PyStackRef_AsPyObjectSteal(start),
                                                PyStackRef_AsPyObjectSteal(stop));
    int err;
    if (slice == NULL) {
        err = 1;
    }
    else {
        err = PyObject_SetItem(PyStackRef_AsPyObjectBorrow(container), slice, PyStackRef_AsPyObjectBorrow(v));
        Py_DECREF(slice);
    }
    DECREF_INPUTS();
    ERROR_IF(err, error);
}

macro(STORE_SLICE) = _SPECIALIZE_STORE_SLICE + _STORE_SLICE;
857
858
// list[int] fast path: exact list, exact non-negative compact int index
// within bounds; otherwise deoptimize.  The free-threaded build uses a
// reference-grabbing accessor instead of a size check.
inst(BINARY_SUBSCR_LIST_INT, (unused/1, list_st, sub_st -- res)) {
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *list = PyStackRef_AsPyObjectBorrow(list_st);

    DEOPT_IF(!PyLong_CheckExact(sub));
    DEOPT_IF(!PyList_CheckExact(list));

    // Deopt unless 0 <= sub < PyList_Size(list)
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    #ifdef Py_GIL_DISABLED
    PyObject *res_o = _PyList_GetItemRef((PyListObject*)list, index);
    DEOPT_IF(res_o == NULL);
    STAT_INC(BINARY_SUBSCR, hit);
    #else
    DEOPT_IF(index >= PyList_GET_SIZE(list));
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o = PyList_GET_ITEM(list, index);
    assert(res_o != NULL);
    Py_INCREF(res_o);
    #endif
    PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
    DEAD(sub_st);
    PyStackRef_CLOSE(list_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// str[int] fast path, limited to ASCII results so the cached 1-char
// string singletons can be returned without allocation.
inst(BINARY_SUBSCR_STR_INT, (unused/1, str_st, sub_st -- res)) {
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *str = PyStackRef_AsPyObjectBorrow(str_st);

    DEOPT_IF(!PyLong_CheckExact(sub));
    DEOPT_IF(!PyUnicode_CheckExact(str));
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index);
    // Specialize for reading an ASCII character from any string:
    Py_UCS4 c = PyUnicode_READ_CHAR(str, index);
    DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c);
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c];
    PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
    DEAD(sub_st);
    PyStackRef_CLOSE(str_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
}
904
905
// tuple[int] fast path: exact tuple, exact non-negative compact int index
// within bounds; tuples are immutable so no free-threaded special case.
inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple_st, sub_st -- res)) {
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *tuple = PyStackRef_AsPyObjectBorrow(tuple_st);

    DEOPT_IF(!PyLong_CheckExact(sub));
    DEOPT_IF(!PyTuple_CheckExact(tuple));

    // Deopt unless 0 <= sub < PyTuple_Size(tuple)
    DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
    Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
    DEOPT_IF(index >= PyTuple_GET_SIZE(tuple));
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o = PyTuple_GET_ITEM(tuple, index);
    assert(res_o != NULL);
    Py_INCREF(res_o);
    PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
    DEAD(sub_st);
    PyStackRef_CLOSE(tuple_st);
    res = PyStackRef_FromPyObjectSteal(res_o);
}

// dict[key] fast path for exact dicts; raises KeyError on a miss (rc==0)
// and propagates errors (rc<0) via the shared ERROR_IF below.
inst(BINARY_SUBSCR_DICT, (unused/1, dict_st, sub_st -- res)) {
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
    PyObject *dict = PyStackRef_AsPyObjectBorrow(dict_st);

    DEOPT_IF(!PyDict_CheckExact(dict));
    STAT_INC(BINARY_SUBSCR, hit);
    PyObject *res_o;
    int rc = PyDict_GetItemRef(dict, sub, &res_o);
    if (rc == 0) {
        _PyErr_SetKeyError(sub);
    }
    DECREF_INPUTS();
    ERROR_IF(rc <= 0, error);  // not found or error
    res = PyStackRef_FromPyObjectSteal(res_o);
}
941
942
// Guard for the __getitem__-calling specialization: the container's heap
// type must have a cached, version-matching Python __getitem__ taking
// exactly (self, index), and there must be stack space for its frame.
op(_BINARY_SUBSCR_CHECK_FUNC, (container, unused -- container, unused, getitem)) {
    PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
    DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE));
    PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
    PyObject *getitem_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(ht->_spec_cache.getitem);
    DEOPT_IF(getitem_o == NULL);
    assert(PyFunction_Check(getitem_o));
    uint32_t cached_version = FT_ATOMIC_LOAD_UINT32_RELAXED(ht->_spec_cache.getitem_version);
    DEOPT_IF(((PyFunctionObject *)getitem_o)->func_version != cached_version);
    PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem_o);
    assert(code->co_argcount == 2);
    DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
    getitem = PyStackRef_FromPyObjectNew(getitem_o);
    STAT_INC(BINARY_SUBSCR, hit);
}

// Build the frame for __getitem__(container, sub); _PUSH_FRAME (in the
// macro below) enters it.
op(_BINARY_SUBSCR_INIT_CALL, (container, sub, getitem -- new_frame: _PyInterpreterFrame* )) {
    new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2, frame);
    new_frame->localsplus[0] = container;
    new_frame->localsplus[1] = sub;
    INPUTS_DEAD();
    frame->return_offset = INSTRUCTION_SIZE;
}

macro(BINARY_SUBSCR_GETITEM) =
    unused/1 +  // Skip over the counter
    _CHECK_PEP_523 +
    _BINARY_SUBSCR_CHECK_FUNC +
    _BINARY_SUBSCR_INIT_CALL +
    _PUSH_FRAME;
972
973
// Append v to the list that sits oparg-1 entries below it (used by list
// comprehensions); the append consumes v's reference.
inst(LIST_APPEND, (list, unused[oparg-1], v -- list, unused[oparg-1])) {
    int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list),
                                    PyStackRef_AsPyObjectSteal(v));
    ERROR_IF(err < 0, error);
}

// Add v to the set that sits oparg-1 entries below it (used by set
// comprehensions).
inst(SET_ADD, (set, unused[oparg-1], v -- set, unused[oparg-1])) {
    int err = PySet_Add(PyStackRef_AsPyObjectBorrow(set),
                        PyStackRef_AsPyObjectBorrow(v));
    DECREF_INPUTS();
    ERROR_IF(err, error);
}

// STORE_SUBSCR specialization family.
family(STORE_SUBSCR, INLINE_CACHE_ENTRIES_STORE_SUBSCR) = {
    STORE_SUBSCR_DICT,
    STORE_SUBSCR_LIST_INT,
};
990
991
specializing op(_SPECIALIZE_STORE_SUBSCR, (counter/1, container, sub -- container, sub)) {
992
#if ENABLE_SPECIALIZATION_FT
993
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
994
next_instr = this_instr;
995
_Py_Specialize_StoreSubscr(container, sub, next_instr);
996
DISPATCH_SAME_OPARG();
997
}
998
OPCODE_DEFERRED_INC(STORE_SUBSCR);
999
ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1000
#endif /* ENABLE_SPECIALIZATION_FT */